PR/56490
[official-gcc.git] / gcc / config / mep / mep.c
blobc190d316045fa78ea0f4c549d1f7f1e85def1215
1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
32 #include "output.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "except.h"
40 #include "function.h"
41 #include "optabs.h"
42 #include "reload.h"
43 #include "tm_p.h"
44 #include "ggc.h"
45 #include "diagnostic-core.h"
46 #include "target.h"
47 #include "target-def.h"
48 #include "langhooks.h"
49 #include "df.h"
50 #include "gimple.h"
51 #include "opts.h"
52 #include "dumpfile.h"
54 /* Structure of this file:
56 + Command Line Option Support
57 + Pattern support - constraints, predicates, expanders
58 + Reload Support
59 + Costs
60 + Functions to save and restore machine-specific function data.
61 + Frame/Epilog/Prolog Related
62 + Operand Printing
63 + Function args in registers
64 + Handle pipeline hazards
65 + Handle attributes
66 + Trampolines
67 + Machine-dependent Reorg
68 + Builtins. */
70 /* Symbol encodings:
72 Symbols are encoded as @ <char> . <name> where <char> is one of these:
74 b - based
75 t - tiny
76 n - near
77 f - far
78 i - io, near
79 I - io, far
80 c - cb (control bus) */
82 struct GTY(()) machine_function
84 int mep_frame_pointer_needed;
86 /* For varargs. */
87 int arg_regs_to_save;
88 int regsave_filler;
89 int frame_filler;
90 int frame_locked;
92 /* Records __builtin_return address. */
93 rtx eh_stack_adjust;
95 int reg_save_size;
96 int reg_save_slot[FIRST_PSEUDO_REGISTER];
97 unsigned char reg_saved[FIRST_PSEUDO_REGISTER];
99 /* 2 if the current function has an interrupt attribute, 1 if not, 0
100 if unknown. This is here because resource.c uses EPILOGUE_USES
101 which needs it. */
102 int interrupt_handler;
104 /* Likewise, for disinterrupt attribute. */
105 int disable_interrupts;
107 /* Number of doloop tags used so far. */
108 int doloop_tags;
110 /* True if the last tag was allocated to a doloop_end. */
111 bool doloop_tag_from_end;
113 /* True if reload changes $TP. */
114 bool reload_changes_tp;
116 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
117 We only set this if the function is an interrupt handler. */
118 int asms_without_operands;
121 #define MEP_CONTROL_REG(x) \
122 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
124 static GTY(()) section * based_section;
125 static GTY(()) section * tinybss_section;
126 static GTY(()) section * far_section;
127 static GTY(()) section * farbss_section;
128 static GTY(()) section * frodata_section;
129 static GTY(()) section * srodata_section;
131 static GTY(()) section * vtext_section;
132 static GTY(()) section * vftext_section;
133 static GTY(()) section * ftext_section;
135 static void mep_set_leaf_registers (int);
136 static bool symbol_p (rtx);
137 static bool symbolref_p (rtx);
138 static void encode_pattern_1 (rtx);
139 static void encode_pattern (rtx);
140 static bool const_in_range (rtx, int, int);
141 static void mep_rewrite_mult (rtx, rtx);
142 static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
143 static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
144 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
145 static bool move_needs_splitting (rtx, rtx, enum machine_mode);
146 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
147 static bool mep_nongeneral_reg (rtx);
148 static bool mep_general_copro_reg (rtx);
149 static bool mep_nonregister (rtx);
150 static struct machine_function* mep_init_machine_status (void);
151 static rtx mep_tp_rtx (void);
152 static rtx mep_gp_rtx (void);
153 static bool mep_interrupt_p (void);
154 static bool mep_disinterrupt_p (void);
155 static bool mep_reg_set_p (rtx, rtx);
156 static bool mep_reg_set_in_function (int);
157 static bool mep_interrupt_saved_reg (int);
158 static bool mep_call_saves_register (int);
159 static rtx F (rtx);
160 static void add_constant (int, int, int, int);
161 static rtx maybe_dead_move (rtx, rtx, bool);
162 static void mep_reload_pointer (int, const char *);
163 static void mep_start_function (FILE *, HOST_WIDE_INT);
164 static bool mep_function_ok_for_sibcall (tree, tree);
165 static int unique_bit_in (HOST_WIDE_INT);
166 static int bit_size_for_clip (HOST_WIDE_INT);
167 static int bytesize (const_tree, enum machine_mode);
168 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
169 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
170 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
171 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
172 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
173 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
174 static bool mep_function_attribute_inlinable_p (const_tree);
175 static bool mep_can_inline_p (tree, tree);
176 static bool mep_lookup_pragma_disinterrupt (const char *);
177 static int mep_multiple_address_regions (tree, bool);
178 static int mep_attrlist_to_encoding (tree, tree);
179 static void mep_insert_attributes (tree, tree *);
180 static void mep_encode_section_info (tree, rtx, int);
181 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
182 static void mep_unique_section (tree, int);
183 static unsigned int mep_section_type_flags (tree, const char *, int);
184 static void mep_asm_named_section (const char *, unsigned int, tree);
185 static bool mep_mentioned_p (rtx, rtx, int);
186 static void mep_reorg_regmove (rtx);
187 static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
188 static void mep_reorg_repeat (rtx);
189 static bool mep_invertable_branch_p (rtx);
190 static void mep_invert_branch (rtx, rtx);
191 static void mep_reorg_erepeat (rtx);
192 static void mep_jmp_return_reorg (rtx);
193 static void mep_reorg_addcombine (rtx);
194 static void mep_reorg (void);
195 static void mep_init_intrinsics (void);
196 static void mep_init_builtins (void);
197 static void mep_intrinsic_unavailable (int);
198 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
199 static bool mep_get_move_insn (int, const struct cgen_insn **);
200 static rtx mep_convert_arg (enum machine_mode, rtx);
201 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
202 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
203 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
204 static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
205 static int mep_adjust_cost (rtx, rtx, rtx, int);
206 static int mep_issue_rate (void);
207 static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
208 static void mep_move_ready_insn (rtx *, int, rtx);
209 static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
210 static rtx mep_make_bundle (rtx, rtx);
211 static void mep_bundle_insns (rtx);
212 static bool mep_rtx_cost (rtx, int, int, int, int *, bool);
213 static int mep_address_cost (rtx, enum machine_mode, addr_space_t, bool);
214 static void mep_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
215 tree, int *, int);
216 static bool mep_pass_by_reference (cumulative_args_t cum, enum machine_mode,
217 const_tree, bool);
218 static rtx mep_function_arg (cumulative_args_t, enum machine_mode,
219 const_tree, bool);
220 static void mep_function_arg_advance (cumulative_args_t, enum machine_mode,
221 const_tree, bool);
222 static bool mep_vector_mode_supported_p (enum machine_mode);
223 static rtx mep_allocate_initial_value (rtx);
224 static void mep_asm_init_sections (void);
225 static int mep_comp_type_attributes (const_tree, const_tree);
226 static bool mep_narrow_volatile_bitfield (void);
227 static rtx mep_expand_builtin_saveregs (void);
228 static tree mep_build_builtin_va_list (void);
229 static void mep_expand_va_start (tree, rtx);
230 static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
231 static bool mep_can_eliminate (const int, const int);
232 static void mep_conditional_register_usage (void);
233 static void mep_trampoline_init (rtx, tree, rtx);
235 #define WANT_GCC_DEFINITIONS
236 #include "mep-intrin.h"
237 #undef WANT_GCC_DEFINITIONS
240 /* Command Line Option Support. */
242 char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
244 /* True if we can use cmov instructions to move values back and forth
245 between core and coprocessor registers. */
246 bool mep_have_core_copro_moves_p;
248 /* True if we can use cmov instructions (or a work-alike) to move
249 values between coprocessor registers. */
250 bool mep_have_copro_copro_moves_p;
252 /* A table of all coprocessor instructions that can act like
253 a coprocessor-to-coprocessor cmov. */
254 static const int mep_cmov_insns[] = {
255 mep_cmov,
256 mep_cpmov,
257 mep_fmovs,
258 mep_caddi3,
259 mep_csubi3,
260 mep_candi3,
261 mep_cori3,
262 mep_cxori3,
263 mep_cand3,
264 mep_cor3
268 static void
269 mep_set_leaf_registers (int enable)
271 int i;
273 if (mep_leaf_registers[0] != enable)
274 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
275 mep_leaf_registers[i] = enable;
278 static void
279 mep_conditional_register_usage (void)
281 int i;
283 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
285 fixed_regs[HI_REGNO] = 1;
286 fixed_regs[LO_REGNO] = 1;
287 call_used_regs[HI_REGNO] = 1;
288 call_used_regs[LO_REGNO] = 1;
291 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
292 global_regs[i] = 1;
295 static void
296 mep_option_override (void)
298 unsigned int i;
299 int j;
300 cl_deferred_option *opt;
301 vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) mep_deferred_options;
303 if (v)
304 FOR_EACH_VEC_ELT (*v, i, opt)
306 switch (opt->opt_index)
308 case OPT_mivc2:
309 for (j = 0; j < 32; j++)
310 fixed_regs[j + 48] = 0;
311 for (j = 0; j < 32; j++)
312 call_used_regs[j + 48] = 1;
313 for (j = 6; j < 8; j++)
314 call_used_regs[j + 48] = 0;
316 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
317 RN (0, "$csar0");
318 RN (1, "$cc");
319 RN (4, "$cofr0");
320 RN (5, "$cofr1");
321 RN (6, "$cofa0");
322 RN (7, "$cofa1");
323 RN (15, "$csar1");
325 RN (16, "$acc0_0");
326 RN (17, "$acc0_1");
327 RN (18, "$acc0_2");
328 RN (19, "$acc0_3");
329 RN (20, "$acc0_4");
330 RN (21, "$acc0_5");
331 RN (22, "$acc0_6");
332 RN (23, "$acc0_7");
334 RN (24, "$acc1_0");
335 RN (25, "$acc1_1");
336 RN (26, "$acc1_2");
337 RN (27, "$acc1_3");
338 RN (28, "$acc1_4");
339 RN (29, "$acc1_5");
340 RN (30, "$acc1_6");
341 RN (31, "$acc1_7");
342 #undef RN
343 break;
345 default:
346 gcc_unreachable ();
350 if (flag_pic == 1)
351 warning (OPT_fpic, "-fpic is not supported");
352 if (flag_pic == 2)
353 warning (OPT_fPIC, "-fPIC is not supported");
354 if (TARGET_S && TARGET_M)
355 error ("only one of -ms and -mm may be given");
356 if (TARGET_S && TARGET_L)
357 error ("only one of -ms and -ml may be given");
358 if (TARGET_M && TARGET_L)
359 error ("only one of -mm and -ml may be given");
360 if (TARGET_S && global_options_set.x_mep_tiny_cutoff)
361 error ("only one of -ms and -mtiny= may be given");
362 if (TARGET_M && global_options_set.x_mep_tiny_cutoff)
363 error ("only one of -mm and -mtiny= may be given");
364 if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
365 warning (0, "-mclip currently has no effect without -mminmax");
367 if (mep_const_section)
369 if (strcmp (mep_const_section, "tiny") != 0
370 && strcmp (mep_const_section, "near") != 0
371 && strcmp (mep_const_section, "far") != 0)
372 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
375 if (TARGET_S)
376 mep_tiny_cutoff = 65536;
377 if (TARGET_M)
378 mep_tiny_cutoff = 0;
379 if (TARGET_L && ! global_options_set.x_mep_tiny_cutoff)
380 mep_tiny_cutoff = 0;
382 if (TARGET_64BIT_CR_REGS)
383 flag_split_wide_types = 0;
385 init_machine_status = mep_init_machine_status;
386 mep_init_intrinsics ();
389 /* Pattern Support - constraints, predicates, expanders. */
391 /* MEP has very few instructions that can refer to the span of
392 addresses used by symbols, so it's common to check for them. */
394 static bool
395 symbol_p (rtx x)
397 int c = GET_CODE (x);
399 return (c == CONST_INT
400 || c == CONST
401 || c == SYMBOL_REF);
404 static bool
405 symbolref_p (rtx x)
407 int c;
409 if (GET_CODE (x) != MEM)
410 return false;
412 c = GET_CODE (XEXP (x, 0));
413 return (c == CONST_INT
414 || c == CONST
415 || c == SYMBOL_REF);
418 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
420 #define GEN_REG(R, STRICT) \
421 (GR_REGNO_P (R) \
422 || (!STRICT \
423 && ((R) == ARG_POINTER_REGNUM \
424 || (R) >= FIRST_PSEUDO_REGISTER)))
426 static char pattern[12], *patternp;
427 static GTY(()) rtx patternr[12];
428 #define RTX_IS(x) (strcmp (pattern, x) == 0)
430 static void
431 encode_pattern_1 (rtx x)
433 int i;
435 if (patternp == pattern + sizeof (pattern) - 2)
437 patternp[-1] = '?';
438 return;
441 patternr[patternp-pattern] = x;
443 switch (GET_CODE (x))
445 case REG:
446 *patternp++ = 'r';
447 break;
448 case MEM:
449 *patternp++ = 'm';
450 case CONST:
451 encode_pattern_1 (XEXP(x, 0));
452 break;
453 case PLUS:
454 *patternp++ = '+';
455 encode_pattern_1 (XEXP(x, 0));
456 encode_pattern_1 (XEXP(x, 1));
457 break;
458 case LO_SUM:
459 *patternp++ = 'L';
460 encode_pattern_1 (XEXP(x, 0));
461 encode_pattern_1 (XEXP(x, 1));
462 break;
463 case HIGH:
464 *patternp++ = 'H';
465 encode_pattern_1 (XEXP(x, 0));
466 break;
467 case SYMBOL_REF:
468 *patternp++ = 's';
469 break;
470 case LABEL_REF:
471 *patternp++ = 'l';
472 break;
473 case CONST_INT:
474 case CONST_DOUBLE:
475 *patternp++ = 'i';
476 break;
477 case UNSPEC:
478 *patternp++ = 'u';
479 *patternp++ = '0' + XCINT(x, 1, UNSPEC);
480 for (i=0; i<XVECLEN (x, 0); i++)
481 encode_pattern_1 (XVECEXP (x, 0, i));
482 break;
483 case USE:
484 *patternp++ = 'U';
485 break;
486 default:
487 *patternp++ = '?';
488 #if 0
489 fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
490 debug_rtx (x);
491 gcc_unreachable ();
492 #endif
493 break;
497 static void
498 encode_pattern (rtx x)
500 patternp = pattern;
501 encode_pattern_1 (x);
502 *patternp = 0;
506 mep_section_tag (rtx x)
508 const char *name;
510 while (1)
512 switch (GET_CODE (x))
514 case MEM:
515 case CONST:
516 x = XEXP (x, 0);
517 break;
518 case UNSPEC:
519 x = XVECEXP (x, 0, 0);
520 break;
521 case PLUS:
522 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
523 return 0;
524 x = XEXP (x, 0);
525 break;
526 default:
527 goto done;
530 done:
531 if (GET_CODE (x) != SYMBOL_REF)
532 return 0;
533 name = XSTR (x, 0);
534 if (name[0] == '@' && name[2] == '.')
536 if (name[1] == 'i' || name[1] == 'I')
538 if (name[1] == 'I')
539 return 'f'; /* near */
540 return 'n'; /* far */
542 return name[1];
544 return 0;
548 mep_regno_reg_class (int regno)
550 switch (regno)
552 case SP_REGNO: return SP_REGS;
553 case TP_REGNO: return TP_REGS;
554 case GP_REGNO: return GP_REGS;
555 case 0: return R0_REGS;
556 case HI_REGNO: return HI_REGS;
557 case LO_REGNO: return LO_REGS;
558 case ARG_POINTER_REGNUM: return GENERAL_REGS;
561 if (GR_REGNO_P (regno))
562 return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
563 if (CONTROL_REGNO_P (regno))
564 return CONTROL_REGS;
566 if (CR_REGNO_P (regno))
568 int i, j;
570 /* Search for the register amongst user-defined subclasses of
571 the coprocessor registers. */
572 for (i = USER0_REGS; i <= USER3_REGS; ++i)
574 if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
575 continue;
576 for (j = 0; j < N_REG_CLASSES; ++j)
578 enum reg_class sub = reg_class_subclasses[i][j];
580 if (sub == LIM_REG_CLASSES)
581 return i;
582 if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
583 break;
587 return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
590 if (CCR_REGNO_P (regno))
591 return CCR_REGS;
593 gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
594 return NO_REGS;
597 static bool
598 const_in_range (rtx x, int minv, int maxv)
600 return (GET_CODE (x) == CONST_INT
601 && INTVAL (x) >= minv
602 && INTVAL (x) <= maxv);
605 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
606 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
607 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
608 at the end of the insn stream. */
611 mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
613 if (rtx_equal_p (dest, src1))
614 return src2;
615 else if (rtx_equal_p (dest, src2))
616 return src1;
617 else
619 if (insn == 0)
620 emit_insn (gen_movsi (copy_rtx (dest), src1));
621 else
622 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
623 return src2;
627 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
628 Change the last element of PATTERN from (clobber (scratch:SI))
629 to (clobber (reg:SI HI_REGNO)). */
631 static void
632 mep_rewrite_mult (rtx insn, rtx pattern)
634 rtx hi_clobber;
636 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
637 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
638 PATTERN (insn) = pattern;
639 INSN_CODE (insn) = -1;
642 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
643 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
644 store the result in DEST if nonnull. */
646 static void
647 mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
649 rtx lo, pattern;
651 lo = gen_rtx_REG (SImode, LO_REGNO);
652 if (dest)
653 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
654 mep_mulr_source (insn, dest, src1, src2));
655 else
656 pattern = gen_mulsi3_lo (lo, src1, src2);
657 mep_rewrite_mult (insn, pattern);
660 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
661 SRC3 into $lo, then use either madd or maddr. The move into $lo will
662 be deleted by a peephole2 if SRC3 is already in $lo. */
664 static void
665 mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
667 rtx lo, pattern;
669 lo = gen_rtx_REG (SImode, LO_REGNO);
670 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
671 if (dest)
672 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
673 mep_mulr_source (insn, dest, src1, src2),
674 copy_rtx (lo));
675 else
676 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
677 mep_rewrite_mult (insn, pattern);
680 /* Return true if $lo has the same value as integer register GPR when
681 instruction INSN is reached. If necessary, rewrite the instruction
682 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
683 rtx for (reg:SI LO_REGNO).
685 This function is intended to be used by the peephole2 pass. Since
686 that pass goes from the end of a basic block to the beginning, and
687 propagates liveness information on the way, there is no need to
688 update register notes here.
690 If GPR_DEAD_P is true on entry, and this function returns true,
691 then the caller will replace _every_ use of GPR in and after INSN
692 with LO. This means that if the instruction that sets $lo is a
693 mulr- or maddr-type instruction, we can rewrite it to use mul or
694 madd instead. In combination with the copy progagation pass,
695 this allows us to replace sequences like:
697 mov GPR,R1
698 mulr GPR,R2
700 with:
702 mul R1,R2
704 if GPR is no longer used. */
706 static bool
707 mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
711 insn = PREV_INSN (insn);
712 if (INSN_P (insn))
713 switch (recog_memoized (insn))
715 case CODE_FOR_mulsi3_1:
716 extract_insn (insn);
717 if (rtx_equal_p (recog_data.operand[0], gpr))
719 mep_rewrite_mulsi3 (insn,
720 gpr_dead_p ? NULL : recog_data.operand[0],
721 recog_data.operand[1],
722 recog_data.operand[2]);
723 return true;
725 return false;
727 case CODE_FOR_maddsi3:
728 extract_insn (insn);
729 if (rtx_equal_p (recog_data.operand[0], gpr))
731 mep_rewrite_maddsi3 (insn,
732 gpr_dead_p ? NULL : recog_data.operand[0],
733 recog_data.operand[1],
734 recog_data.operand[2],
735 recog_data.operand[3]);
736 return true;
738 return false;
740 case CODE_FOR_mulsi3r:
741 case CODE_FOR_maddsi3r:
742 extract_insn (insn);
743 return rtx_equal_p (recog_data.operand[1], gpr);
745 default:
746 if (reg_set_p (lo, insn)
747 || reg_set_p (gpr, insn)
748 || volatile_insn_p (PATTERN (insn)))
749 return false;
751 if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
752 gpr_dead_p = false;
753 break;
756 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
757 return false;
760 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
762 bool
763 mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
765 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
766 extract_insn (insn);
767 return result;
770 /* Return true if SET can be turned into a post-modify load or store
771 that adds OFFSET to GPR. In other words, return true if SET can be
772 changed into:
774 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
776 It's OK to change SET to an equivalent operation in order to
777 make it match. */
779 static bool
780 mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
782 rtx *reg, *mem;
783 unsigned int reg_bytes, mem_bytes;
784 enum machine_mode reg_mode, mem_mode;
786 /* Only simple SETs can be converted. */
787 if (GET_CODE (set) != SET)
788 return false;
790 /* Point REG to what we hope will be the register side of the set and
791 MEM to what we hope will be the memory side. */
792 if (GET_CODE (SET_DEST (set)) == MEM)
794 mem = &SET_DEST (set);
795 reg = &SET_SRC (set);
797 else
799 reg = &SET_DEST (set);
800 mem = &SET_SRC (set);
801 if (GET_CODE (*mem) == SIGN_EXTEND)
802 mem = &XEXP (*mem, 0);
805 /* Check that *REG is a suitable coprocessor register. */
806 if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
807 return false;
809 /* Check that *MEM is a suitable memory reference. */
810 if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
811 return false;
813 /* Get the number of bytes in each operand. */
814 mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
815 reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));
817 /* Check that OFFSET is suitably aligned. */
818 if (INTVAL (offset) & (mem_bytes - 1))
819 return false;
821 /* Convert *MEM to a normal integer mode. */
822 mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
823 *mem = change_address (*mem, mem_mode, NULL);
825 /* Adjust *REG as well. */
826 *reg = shallow_copy_rtx (*reg);
827 if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
829 /* SET is a subword load. Convert it to an explicit extension. */
830 PUT_MODE (*reg, SImode);
831 *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
833 else
835 reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
836 PUT_MODE (*reg, reg_mode);
838 return true;
841 /* Return the effect of frame-related instruction INSN. */
843 static rtx
844 mep_frame_expr (rtx insn)
846 rtx note, expr;
848 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
849 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
850 RTX_FRAME_RELATED_P (expr) = 1;
851 return expr;
854 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
855 new pattern in INSN1; INSN2 will be deleted by the caller. */
857 static void
858 mep_make_parallel (rtx insn1, rtx insn2)
860 rtx expr;
862 if (RTX_FRAME_RELATED_P (insn2))
864 expr = mep_frame_expr (insn2);
865 if (RTX_FRAME_RELATED_P (insn1))
866 expr = gen_rtx_SEQUENCE (VOIDmode,
867 gen_rtvec (2, mep_frame_expr (insn1), expr));
868 set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
869 RTX_FRAME_RELATED_P (insn1) = 1;
872 PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
873 gen_rtvec (2, PATTERN (insn1),
874 PATTERN (insn2)));
875 INSN_CODE (insn1) = -1;
878 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
879 the basic block to see if any previous load or store instruction can
880 be persuaded to do SET_INSN as a side-effect. Return true if so. */
882 static bool
883 mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
885 rtx insn;
887 insn = set_insn;
890 insn = PREV_INSN (insn);
891 if (INSN_P (insn))
893 if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
895 mep_make_parallel (insn, set_insn);
896 return true;
899 if (reg_set_p (reg, insn)
900 || reg_referenced_p (reg, PATTERN (insn))
901 || volatile_insn_p (PATTERN (insn)))
902 return false;
905 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
906 return false;
909 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
911 bool
912 mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
914 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
915 extract_insn (insn);
916 return result;
919 bool
920 mep_allow_clip (rtx ux, rtx lx, int s)
922 HOST_WIDE_INT u = INTVAL (ux);
923 HOST_WIDE_INT l = INTVAL (lx);
924 int i;
926 if (!TARGET_OPT_CLIP)
927 return false;
929 if (s)
931 for (i = 0; i < 30; i ++)
932 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
933 && (l == - ((HOST_WIDE_INT) 1 << i)))
934 return true;
936 else
938 if (l != 0)
939 return false;
941 for (i = 0; i < 30; i ++)
942 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
943 return true;
945 return false;
948 bool
949 mep_bit_position_p (rtx x, bool looking_for)
951 if (GET_CODE (x) != CONST_INT)
952 return false;
953 switch ((int) INTVAL(x) & 0xff)
955 case 0x01: case 0x02: case 0x04: case 0x08:
956 case 0x10: case 0x20: case 0x40: case 0x80:
957 return looking_for;
958 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
959 case 0xef: case 0xdf: case 0xbf: case 0x7f:
960 return !looking_for;
962 return false;
965 static bool
966 move_needs_splitting (rtx dest, rtx src,
967 enum machine_mode mode ATTRIBUTE_UNUSED)
969 int s = mep_section_tag (src);
971 while (1)
973 if (GET_CODE (src) == CONST
974 || GET_CODE (src) == MEM)
975 src = XEXP (src, 0);
976 else if (GET_CODE (src) == SYMBOL_REF
977 || GET_CODE (src) == LABEL_REF
978 || GET_CODE (src) == PLUS)
979 break;
980 else
981 return false;
983 if (s == 'f'
984 || (GET_CODE (src) == PLUS
985 && GET_CODE (XEXP (src, 1)) == CONST_INT
986 && (INTVAL (XEXP (src, 1)) < -65536
987 || INTVAL (XEXP (src, 1)) > 0xffffff))
988 || (GET_CODE (dest) == REG
989 && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
990 return true;
991 return false;
994 bool
995 mep_split_mov (rtx *operands, int symbolic)
997 if (symbolic)
999 if (move_needs_splitting (operands[0], operands[1], SImode))
1000 return true;
1001 return false;
1004 if (GET_CODE (operands[1]) != CONST_INT)
1005 return false;
1007 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1008 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1009 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1010 return false;
1012 if (((!reload_completed && !reload_in_progress)
1013 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1014 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1015 return false;
1017 return true;
1020 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1021 it to one specific value. So the insn chosen depends on whether
1022 the source and destination modes match. */
1024 bool
1025 mep_vliw_mode_match (rtx tgt)
1027 bool src_vliw = mep_vliw_function_p (cfun->decl);
1028 bool tgt_vliw = INTVAL (tgt);
1030 return src_vliw == tgt_vliw;
1033 /* Like the above, but also test for near/far mismatches. */
1035 bool
1036 mep_vliw_jmp_match (rtx tgt)
1038 bool src_vliw = mep_vliw_function_p (cfun->decl);
1039 bool tgt_vliw = INTVAL (tgt);
1041 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1042 return false;
1044 return src_vliw == tgt_vliw;
1047 bool
1048 mep_multi_slot (rtx x)
1050 return get_attr_slot (x) == SLOT_MULTI;
1053 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1055 static bool
1056 mep_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1058 /* We can't convert symbol values to gp- or tp-rel values after
1059 reload, as reload might have used $gp or $tp for other
1060 purposes. */
1061 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1063 char e = mep_section_tag (x);
1064 return (e != 't' && e != 'b');
1066 return 1;
1069 /* Be careful not to use macros that need to be compiled one way for
1070 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
1072 bool
1073 mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
1075 int the_tag;
1077 #define DEBUG_LEGIT 0
1078 #if DEBUG_LEGIT
1079 fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
1080 debug_rtx (x);
1081 #endif
1083 if (GET_CODE (x) == LO_SUM
1084 && GET_CODE (XEXP (x, 0)) == REG
1085 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1086 && CONSTANT_P (XEXP (x, 1)))
1088 if (GET_MODE_SIZE (mode) > 4)
1090 /* We will end up splitting this, and lo_sums are not
1091 offsettable for us. */
1092 #if DEBUG_LEGIT
1093 fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
1094 #endif
1095 return false;
1097 #if DEBUG_LEGIT
1098 fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
1099 #endif
1100 return true;
1103 if (GET_CODE (x) == REG
1104 && GEN_REG (REGNO (x), strict))
1106 #if DEBUG_LEGIT
1107 fprintf (stderr, " - yup, [reg]\n");
1108 #endif
1109 return true;
1112 if (GET_CODE (x) == PLUS
1113 && GET_CODE (XEXP (x, 0)) == REG
1114 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1115 && const_in_range (XEXP (x, 1), -32768, 32767))
1117 #if DEBUG_LEGIT
1118 fprintf (stderr, " - yup, [reg+const]\n");
1119 #endif
1120 return true;
1123 if (GET_CODE (x) == PLUS
1124 && GET_CODE (XEXP (x, 0)) == REG
1125 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1126 && GET_CODE (XEXP (x, 1)) == CONST
1127 && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
1128 || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
1129 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
1130 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
1132 #if DEBUG_LEGIT
1133 fprintf (stderr, " - yup, [reg+unspec]\n");
1134 #endif
1135 return true;
1138 the_tag = mep_section_tag (x);
1140 if (the_tag == 'f')
1142 #if DEBUG_LEGIT
1143 fprintf (stderr, " - nope, [far]\n");
1144 #endif
1145 return false;
1148 if (mode == VOIDmode
1149 && GET_CODE (x) == SYMBOL_REF)
1151 #if DEBUG_LEGIT
1152 fprintf (stderr, " - yup, call [symbol]\n");
1153 #endif
1154 return true;
1157 if ((mode == SImode || mode == SFmode)
1158 && CONSTANT_P (x)
1159 && mep_legitimate_constant_p (mode, x)
1160 && the_tag != 't' && the_tag != 'b')
1162 if (GET_CODE (x) != CONST_INT
1163 || (INTVAL (x) <= 0xfffff
1164 && INTVAL (x) >= 0
1165 && (INTVAL (x) % 4) == 0))
1167 #if DEBUG_LEGIT
1168 fprintf (stderr, " - yup, [const]\n");
1169 #endif
1170 return true;
1174 #if DEBUG_LEGIT
1175 fprintf (stderr, " - nope.\n");
1176 #endif
1177 return false;
/* Implement LEGITIMIZE_RELOAD_ADDRESS.  Intercept two address shapes
   that generic reload handles badly for MeP and push explicit reloads
   for them instead.  *X is the address being reloaded, MODE the mode
   of the memory access, OPNUM/TYPE_I the reload operand number and
   reload_type (passed as int).  Returns 1 when a reload was pushed
   and *X fully handled, 0 to let generic code proceed.  */
int
mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
			       int type_i,
			       int ind_levels ATTRIBUTE_UNUSED)
{
  enum reload_type type = (enum reload_type) type_i;

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == MEM
      && GET_CODE (XEXP (*x, 1)) == REG)
    {
      /* GCC will by default copy the MEM into a REG, which results in
	 an invalid address.  For us, the best thing to do is move the
	 whole expression to a REG.  */
      push_reload (*x, NULL_RTX, x, NULL,
		   GENERAL_REGS, mode, VOIDmode,
		   0, 0, opnum, type);
      return 1;
    }

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
      && GET_CODE (XEXP (*x, 1)) == CONST_INT)
    {
      /* Section tag of the symbol: 't'/'b' (tiny/based) symbols are
	 reachable via $gp/$tp and stay as they are; everything else
	 gets the symbol part reloaded into a register.  */
      char e = mep_section_tag (XEXP (*x, 0));

      if (e != 't' && e != 'b')
	{
	  /* GCC thinks that (sym+const) is a valid address.  Well,
	     sometimes it is, this time it isn't.  The best thing to
	     do is reload the symbol to a register, since reg+int
	     tends to work, and we can't just add the symbol and
	     constant anyway.  */
	  push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
		       GENERAL_REGS, mode, VOIDmode,
		       0, 0, opnum, type);
	  return 1;
	}
    }
  return 0;
}
/* Return the length in bytes (2 or 4) of the address encoding used by
   core load/store INSN, whose single_set operand OPN is the MEM.
   Short (2-byte) forms exist for plain register addressing, for
   word-sized $sp+imm7a4 accesses, and for certain $tp-relative
   accesses involving low registers.  */
int
mep_core_address_length (rtx insn, int opn)
{
  rtx set = single_set (insn);
  rtx mem = XEXP (set, opn);
  /* The non-memory operand of the SET; the TP-relative short form is
     only available when it is a low ($0..$7) register.  */
  rtx other = XEXP (set, 1-opn);
  rtx addr = XEXP (mem, 0);

  if (register_operand (addr, Pmode))
    return 2;
  if (GET_CODE (addr) == PLUS)
    {
      rtx addend = XEXP (addr, 1);

      gcc_assert (REG_P (XEXP (addr, 0)));

      switch (REGNO (XEXP (addr, 0)))
	{
	case STACK_POINTER_REGNUM:
	  /* sw/lw $rn, imm7a4($sp) — word accesses only.  */
	  if (GET_MODE_SIZE (GET_MODE (mem)) == 4
	      && mep_imm7a4_operand (addend, VOIDmode))
	    return 2;
	  break;

	case 13: /* TP */
	  gcc_assert (REG_P (other));

	  /* The short TP form only encodes registers $0..$7.  */
	  if (REGNO (other) >= 8)
	    break;

	  /* A %tprel annotation, or a small properly-aligned offset,
	     both fit the short encoding.  */
	  if (GET_CODE (addend) == CONST
	      && GET_CODE (XEXP (addend, 0)) == UNSPEC
	      && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
	    return 2;

	  if (GET_CODE (addend) == CONST_INT
	      && INTVAL (addend) >= 0
	      && INTVAL (addend) <= 127
	      && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
	    return 2;
	  break;
	}
    }

  return 4;
}
1270 mep_cop_address_length (rtx insn, int opn)
1272 rtx set = single_set (insn);
1273 rtx mem = XEXP (set, opn);
1274 rtx addr = XEXP (mem, 0);
1276 if (GET_CODE (mem) != MEM)
1277 return 2;
1278 if (register_operand (addr, Pmode))
1279 return 2;
1280 if (GET_CODE (addr) == POST_INC)
1281 return 2;
1283 return 4;
1286 #define DEBUG_EXPAND_MOV 0
/* Expander for the mov<mode> patterns.  Rewrites tiny ('t') and based
   ('b') section references into explicit $gp/$tp-relative UNSPEC
   addresses, forces awkward operand combinations (mem->mem, control
   register <-> mem, far stores) through temporaries, and splits
   far/tagged symbol loads into topsym/botsym pairs.  Returns true if
   the move was fully emitted here, false if the caller should emit
   the generic move pattern.  */
bool
mep_expand_mov (rtx *operands, enum machine_mode mode)
{
  int i, t;
  int tag[2];
  rtx tpsym, tpoffs;
  /* Nonzero once we must treat tagged symbols as plain symbols,
     because the $gp/$tp initial values are no longer trustworthy.  */
  int post_reload = 0;

  tag[0] = mep_section_tag (operands[0]);
  tag[1] = mep_section_tag (operands[1]);

  /* mem -> mem needs an intermediate register before reload.  */
  if (!reload_in_progress
      && !reload_completed
      && GET_CODE (operands[0]) != REG
      && GET_CODE (operands[0]) != SUBREG
      && GET_CODE (operands[1]) != REG
      && GET_CODE (operands[1]) != SUBREG)
    operands[1] = copy_to_mode_reg (mode, operands[1]);

#if DEBUG_EXPAND_MOV
  fprintf(stderr, "expand move %s %d\n", mode_name[mode],
	  reload_in_progress || reload_completed);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* Wide moves are handled by their own splitters.  */
  if (mode == DImode || mode == DFmode)
    return false;

  if (reload_in_progress || reload_completed)
    {
      rtx r;

      if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
	cfun->machine->reload_changes_tp = true;

      /* Only use the $gp/$tp-relative forms if the corresponding base
	 register still holds its incoming value.  */
      if (tag[0] == 't' || tag[1] == 't')
	{
	  r = has_hard_reg_initial_val (Pmode, GP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
	    post_reload = 1;
	}
      if (tag[0] == 'b' || tag[1] == 'b')
	{
	  r = has_hard_reg_initial_val (Pmode, TP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
	    post_reload = 1;
	}
      if (cfun->machine->reload_changes_tp == true)
	post_reload = 1;
    }

  if (!post_reload)
    {
      rtx n;

      /* Loading the *address* of a tagged symbol: emit
	 dest = base_reg + %tprel/%gprel(sym [+ offset]).  */
      if (symbol_p (operands[1]))
	{
	  t = mep_section_tag (operands[1]);
	  if (t == 'b' || t == 't')
	    {
	      if (GET_CODE (operands[1]) == SYMBOL_REF)
		{
		  tpsym = operands[1];
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, operands[1]),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == PLUS
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
		{
		  /* (const (plus (symbol_ref) (const_int))) — keep the
		     offset outside the UNSPEC.  */
		  tpsym = XEXP (XEXP (operands[1], 0), 0);
		  tpoffs = XEXP (XEXP (operands[1], 0), 1);
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, tpsym),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_PLUS (mode, n, tpoffs);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
		/* Already wrapped — let the generic pattern take it.  */
		return false;
	      else
		{
		  error ("unusual TP-relative address");
		  return false;
		}

	      n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
				       : mep_gp_rtx ()), n);
	      n = emit_insn (gen_rtx_SET (mode, operands[0], n));
#if DEBUG_EXPAND_MOV
	      fprintf(stderr, "mep_expand_mov emitting ");
	      debug_rtx(n);
#endif
	      return true;
	    }
	}

      /* Accessing a tagged variable through a MEM: rewrite the address
	 as base_reg + %tprel/%gprel(sym).  */
      for (i=0; i < 2; i++)
	{
	  t = mep_section_tag (operands[i]);
	  if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
	    {
	      rtx sym, n, r;
	      int u;

	      sym = XEXP (operands[i], 0);
	      if (GET_CODE (sym) == CONST
		  && GET_CODE (XEXP (sym, 0)) == UNSPEC)
		sym = XVECEXP (XEXP (sym, 0), 0, 0);

	      if (t == 'b')
		{
		  r = mep_tp_rtx ();
		  u = UNS_TPREL;
		}
	      else
		{
		  r = mep_gp_rtx ();
		  u = UNS_GPREL;
		}

	      n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
	      n = gen_rtx_CONST (Pmode, n);
	      n = gen_rtx_PLUS (Pmode, r, n);
	      operands[i] = replace_equiv_address (operands[i], n);
	    }
	}
    }

  /* Control registers can only be moved to/from general registers, so
     force any other source into a general register first.  */
  if ((GET_CODE (operands[1]) != REG
       && MEP_CONTROL_REG (operands[0]))
      || (GET_CODE (operands[0]) != REG
	  && MEP_CONTROL_REG (operands[1])))
    {
      rtx temp;
#if DEBUG_EXPAND_MOV
      fprintf (stderr, "cr-mem, forcing op1 to reg\n");
#endif
      temp = gen_reg_rtx (mode);
      emit_move_insn (temp, operands[1]);
      operands[1] = temp;
    }

  /* Stores to far symbols, or non-word-sized symbolic stores, need the
     address in a register.  */
  if (symbolref_p (operands[0])
      && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
	  || (GET_MODE_SIZE (mode) != 4)))
    {
      rtx temp;

      gcc_assert (!reload_in_progress && !reload_completed);

      temp = force_reg (Pmode, XEXP (operands[0], 0));
      operands[0] = replace_equiv_address (operands[0], temp);
      emit_move_insn (operands[0], operands[1]);
      return true;
    }

  /* Before reload, tagged symbols were already handled above.  */
  if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
    tag[1] = 0;

  /* Load a symbol's address via a topsym/botsym (hi/lo) pair.  */
  if (symbol_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
      emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
      return true;
    }

  /* Load from a symbolic MEM: build the address with topsym/botsym,
     then load through it.  */
  if (symbolref_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      rtx temp;

      /* During/after reload we may not create pseudos; reuse the
	 destination as the scratch.  */
      if (reload_in_progress || reload_completed)
	temp = operands[0];
      else
	temp = gen_reg_rtx (Pmode);

      emit_insn (gen_movsi_topsym_s (temp, operands[1]));
      emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
      emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
      return true;
    }

  return false;
}
1479 /* Cases where the pattern can't be made to use at all. */
/* Predicate for the mov<mode> insn conditions: return true if this
   operand pair is acceptable to the move patterns at all.  Rejects
   HIGH sources (handled by movh), far-section operands, tagged
   symbols before reload, and moves with no general register on
   either side.  */
bool
mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int i;

#define DEBUG_MOV_OK 0
#if DEBUG_MOV_OK
  fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
	   mep_section_tag (operands[1]));
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* We want the movh patterns to get these. */
  if (GET_CODE (operands[1]) == HIGH)
    return false;

  /* We can't store a register to a far variable without using a
     scratch register to hold the address.  Using far variables should
     be split by mep_emit_mov anyway. */
  if (mep_section_tag (operands[0]) == 'f'
      || mep_section_tag (operands[1]) == 'f')
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, f\n");
#endif
      return false;
    }
  i = mep_section_tag (operands[1]);
  if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
    /* These are supposed to be generated with adds of the appropriate
       register.  During and after reload, however, we allow them to
       be accessed as normal symbols because adding a dependency on
       the base register now might cause problems. */
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, bt\n");
#endif
      return false;
    }

  /* The only moves we can allow involve at least one general
     register, so require it. */
  for (i = 0; i < 2; i ++)
    {
      /* Allow subregs too, before reload. */
      rtx x = operands[i];

      if (GET_CODE (x) == SUBREG)
	x = XEXP (x, 0);
      if (GET_CODE (x) == REG
	  && ! MEP_CONTROL_REG (x))
	{
#if DEBUG_MOV_OK
	  fprintf (stderr, " - ok\n");
#endif
	  return true;
	}
    }

#if DEBUG_MOV_OK
  fprintf (stderr, " - no, no gen reg\n");
#endif
  return false;
}
1546 #define DEBUG_SPLIT_WIDE_MOVE 0
/* Split a 64-bit move into two 32-bit halves.  On entry operands[0]
   (dest) and operands[1] (src) are the DImode/DFmode operands; on
   exit operands[2]/[3] hold the first (high) dest/src pair and
   operands[4]/[5] the second (low) pair, possibly swapped to avoid
   clobbering a source half before it is read.  */
void
mep_split_wide_move (rtx *operands, enum machine_mode mode)
{
  int i;

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  for (i = 0; i <= 1; i++)
    {
      rtx op = operands[i], hi, lo;

      switch (GET_CODE (op))
	{
	case REG:
	  {
	    unsigned int regno = REGNO (op);

	    if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
	      {
		rtx i32;

		/* A 64-bit coprocessor register: the low half is the
		   SImode view, the high half is bits 32..63 expressed
		   as a ZERO_EXTRACT of the DImode register.  */
		lo = gen_rtx_REG (SImode, regno);
		i32 = GEN_INT (32);
		hi = gen_rtx_ZERO_EXTRACT (SImode,
					   gen_rtx_REG (DImode, regno),
					   i32, i32);
	      }
	    else
	      {
		/* A register pair; which of regno/regno+1 is the high
		   word depends on endianness.  */
		hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
		lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
	      }
	  }
	  break;

	case CONST_INT:
	case CONST_DOUBLE:
	case MEM:
	  hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
	  lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* The high part of CR <- GPR moves must be done after the low part. */
      operands [i + 4] = lo;
      operands [i + 2] = hi;
    }

  if (reg_mentioned_p (operands[2], operands[5])
      || GET_CODE (operands[2]) == ZERO_EXTRACT
      || GET_CODE (operands[4]) == ZERO_EXTRACT)
    {
      rtx tmp;

      /* Overlapping register pairs -- make sure we don't
	 early-clobber ourselves. */
      tmp = operands[2];
      operands[2] = operands[4];
      operands[4] = tmp;
      tmp = operands[3];
      operands[3] = operands[5];
      operands[5] = tmp;
    }

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf(stderr, "\033[34m");
  debug_rtx (operands[2]);
  debug_rtx (operands[3]);
  debug_rtx (operands[4]);
  debug_rtx (operands[5]);
  fprintf(stderr, "\033[0m");
#endif
}
1628 /* Emit a setcc instruction in its entirity. */
1630 static bool
1631 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1633 rtx tmp;
1635 switch (code)
1637 case GT:
1638 case GTU:
1639 tmp = op1, op1 = op2, op2 = tmp;
1640 code = swap_condition (code);
1641 /* FALLTHRU */
1643 case LT:
1644 case LTU:
1645 op1 = force_reg (SImode, op1);
1646 emit_insn (gen_rtx_SET (VOIDmode, dest,
1647 gen_rtx_fmt_ee (code, SImode, op1, op2)));
1648 return true;
1650 case EQ:
1651 if (op2 != const0_rtx)
1652 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1653 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1654 return true;
1656 case NE:
1657 /* Branchful sequence:
1658 mov dest, 0 16-bit
1659 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1660 mov dest, 1 16-bit
1662 Branchless sequence:
1663 add3 tmp, op1, -op2 32-bit (or mov + sub)
1664 sltu3 tmp, tmp, 1 16-bit
1665 xor3 dest, tmp, 1 32-bit
1667 if (optimize_size && op2 != const0_rtx)
1668 return false;
1670 if (op2 != const0_rtx)
1671 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1673 op2 = gen_reg_rtx (SImode);
1674 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1676 emit_insn (gen_rtx_SET (VOIDmode, dest,
1677 gen_rtx_XOR (SImode, op2, const1_rtx)));
1678 return true;
1680 case LE:
1681 if (GET_CODE (op2) != CONST_INT
1682 || INTVAL (op2) == 0x7ffffff)
1683 return false;
1684 op2 = GEN_INT (INTVAL (op2) + 1);
1685 return mep_expand_setcc_1 (LT, dest, op1, op2);
1687 case LEU:
1688 if (GET_CODE (op2) != CONST_INT
1689 || INTVAL (op2) == -1)
1690 return false;
1691 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1692 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1694 case GE:
1695 if (GET_CODE (op2) != CONST_INT
1696 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1697 return false;
1698 op2 = GEN_INT (INTVAL (op2) - 1);
1699 return mep_expand_setcc_1 (GT, dest, op1, op2);
1701 case GEU:
1702 if (GET_CODE (op2) != CONST_INT
1703 || op2 == const0_rtx)
1704 return false;
1705 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1706 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1708 default:
1709 gcc_unreachable ();
1713 bool
1714 mep_expand_setcc (rtx *operands)
1716 rtx dest = operands[0];
1717 enum rtx_code code = GET_CODE (operands[1]);
1718 rtx op0 = operands[2];
1719 rtx op1 = operands[3];
1721 return mep_expand_setcc_1 (code, dest, op0, op1);
/* Expander for conditional branches: operands[0] is the comparison,
   operands[1]/[2] its arguments.  Rewrite the comparison into one the
   hardware can branch on (EQ/NE, or LT/GE against a 4-bit immediate),
   materializing intermediate setcc results as needed, and return the
   comparison RTX for the branch pattern.  */
rtx
mep_expand_cbranch (rtx *operands)
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx tmp;

 restart:
  switch (code)
    {
    case LT:
      /* blti handles small immediates directly.  */
      if (mep_imm4_operand (op1, SImode))
	break;

      /* Otherwise compute the setcc and branch on its truth.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GE:
      if (mep_imm4_operand (op1, SImode))
	break;

      /* a >= b  ==  !(a < b); branch on the setcc being zero.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));

      code = EQ;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case EQ:
    case NE:
      /* beq/bne take a register or 4-bit immediate.  */
      if (! mep_reg_or_imm4_operand (op1, SImode))
	op1 = force_reg (SImode, op1);
      break;

    case LE:
    case GT:
      /* Adjust the constant by one to turn LE/GT into LT/GE, guarding
	 against INT_MAX overflow.  */
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) != 0x7fffffff)
	{
	  op1 = GEN_INT (INTVAL (op1) + 1);
	  code = (code == LE ? LT : GE);
	  goto restart;
	}

      /* a <= b  ==  !(b < a); a > b  ==  b < a.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));

      code = (code == LE ? EQ : NE);
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LTU:
      /* a <u 1  ==  a == 0.  */
      if (op1 == const1_rtx)
	{
	  code = EQ;
	  op1 = const0_rtx;
	  break;
	}

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LEU:
      /* Prefer a direct LEU setcc; fall back to the swapped LTU,
	 branching on its inverse.  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GTU:
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
		  || mep_expand_setcc_1 (LTU, tmp, op1, op0));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GEU:
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    default:
      gcc_unreachable ();
    }

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
1837 const char *
1838 mep_emit_cbranch (rtx *operands, int ne)
1840 if (GET_CODE (operands[1]) == REG)
1841 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1842 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
1843 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1844 else
1845 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
/* Expander for call/call_value.  RETURNS_VALUE is 1 for call_value,
   in which case operands[0] is the result and the call operands are
   shifted up by one.  The call insns carry uses of $tp and $gp so the
   incoming base registers stay live across the call.  */
void
mep_expand_call (rtx *operands, int returns_value)
{
  rtx addr = operands[returns_value];
  rtx tp = mep_tp_rtx ();
  rtx gp = mep_gp_rtx ();

  gcc_assert (GET_CODE (addr) == MEM);

  addr = XEXP (addr, 0);

  if (! mep_call_address_operand (addr, VOIDmode))
    addr = force_reg (SImode, addr);

  /* Normalize a missing third operand to 0.  */
  if (! operands[returns_value+2])
    operands[returns_value+2] = const0_rtx;

  if (returns_value)
    emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
					     operands[3], tp, gp));
  else
    emit_call_insn (gen_call_internal (addr, operands[1],
				       operands[2], tp, gp));
}
1873 /* Aliasing Support. */
1875 /* If X is a machine specific address (i.e. a symbol or label being
1876 referenced as a displacement from the GOT implemented using an
1877 UNSPEC), then return the base term. Otherwise return X. */
/* Implement FIND_BASE_TERM.  If X is a $tp/$gp-relative address of
   the form (plus base (const (unspec [sym] UNS_TPREL/UNS_GPREL))),
   return the underlying symbol; otherwise return X unchanged.  */
rtx
mep_find_base_term (rtx x)
{
  rtx base, term;
  int unspec;

  if (GET_CODE (x) != PLUS)
    return x;
  base = XEXP (x, 0);
  term = XEXP (x, 1);

  /* The base must be the live incoming $tp or $gp value; pick the
     matching unspec code.  */
  if (has_hard_reg_initial_val(Pmode, TP_REGNO)
      && base == mep_tp_rtx ())
    unspec = UNS_TPREL;
  else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
	   && base == mep_gp_rtx ())
    unspec = UNS_GPREL;
  else
    return x;

  if (GET_CODE (term) != CONST)
    return x;
  term = XEXP (term, 0);

  if (GET_CODE (term) != UNSPEC
      || XINT (term, 1) != unspec)
    return x;

  /* The symbol wrapped by the relocation unspec.  */
  return XVECEXP (term, 0, 0);
}
1910 /* Reload Support. */
1912 /* Return true if the registers in CLASS cannot represent the change from
1913 modes FROM to TO. */
1915 bool
1916 mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
1917 enum reg_class regclass)
1919 if (from == to)
1920 return false;
1922 /* 64-bit COP regs must remain 64-bit COP regs. */
1923 if (TARGET_64BIT_CR_REGS
1924 && (regclass == CR_REGS
1925 || regclass == LOADABLE_CR_REGS)
1926 && (GET_MODE_SIZE (to) < 8
1927 || GET_MODE_SIZE (from) < 8))
1928 return true;
1930 return false;
1933 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
1935 static bool
1936 mep_general_reg (rtx x)
1938 while (GET_CODE (x) == SUBREG)
1939 x = XEXP (x, 0);
1940 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
1943 static bool
1944 mep_nongeneral_reg (rtx x)
1946 while (GET_CODE (x) == SUBREG)
1947 x = XEXP (x, 0);
1948 return (GET_CODE (x) == REG
1949 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
1952 static bool
1953 mep_general_copro_reg (rtx x)
1955 while (GET_CODE (x) == SUBREG)
1956 x = XEXP (x, 0);
1957 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
1960 static bool
1961 mep_nonregister (rtx x)
1963 while (GET_CODE (x) == SUBREG)
1964 x = XEXP (x, 0);
1965 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
1968 #define DEBUG_RELOAD 0
1970 /* Return the secondary reload class needed for moving value X to or
1971 from a register in coprocessor register class CLASS. */
/* Return the secondary reload class needed for moving value X to or
   from a register in coprocessor register class RCLASS.  NO_REGS
   means either a direct move or a move through memory; GENERAL_REGS
   means a GPR scratch is required.  */
static enum reg_class
mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
{
  if (mep_general_reg (x))
    /* We can do the move directly if mep_have_core_copro_moves_p,
       otherwise we need to go through memory.  Either way, no secondary
       register is needed.  */
    return NO_REGS;

  if (mep_general_copro_reg (x))
    {
      /* We can do the move directly if mep_have_copro_copro_moves_p.  */
      if (mep_have_copro_copro_moves_p)
	return NO_REGS;

      /* Otherwise we can use a temporary if mep_have_core_copro_moves_p.  */
      if (mep_have_core_copro_moves_p)
	return GENERAL_REGS;

      /* Otherwise we need to do it through memory.  No secondary
	 register is needed.  */
      return NO_REGS;
    }

  if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
      && constraint_satisfied_p (x, CONSTRAINT_U))
    /* X is a memory value that we can access directly.  */
    return NO_REGS;

  /* We have to move X into a GPR first and then copy it to
     the coprocessor register.  The move from the GPR to the
     coprocessor might be done directly or through memory,
     depending on mep_have_core_copro_moves_p. */
  return GENERAL_REGS;
}
2009 /* Copying X to register in RCLASS. */
2011 enum reg_class
2012 mep_secondary_input_reload_class (enum reg_class rclass,
2013 enum machine_mode mode ATTRIBUTE_UNUSED,
2014 rtx x)
2016 int rv = NO_REGS;
2018 #if DEBUG_RELOAD
2019 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2020 debug_rtx (x);
2021 #endif
2023 if (reg_class_subset_p (rclass, CR_REGS))
2024 rv = mep_secondary_copro_reload_class (rclass, x);
2025 else if (MEP_NONGENERAL_CLASS (rclass)
2026 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2027 rv = GENERAL_REGS;
2029 #if DEBUG_RELOAD
2030 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2031 #endif
2032 return (enum reg_class) rv;
2035 /* Copying register in RCLASS to X. */
2037 enum reg_class
2038 mep_secondary_output_reload_class (enum reg_class rclass,
2039 enum machine_mode mode ATTRIBUTE_UNUSED,
2040 rtx x)
2042 int rv = NO_REGS;
2044 #if DEBUG_RELOAD
2045 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2046 debug_rtx (x);
2047 #endif
2049 if (reg_class_subset_p (rclass, CR_REGS))
2050 rv = mep_secondary_copro_reload_class (rclass, x);
2051 else if (MEP_NONGENERAL_CLASS (rclass)
2052 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2053 rv = GENERAL_REGS;
2055 #if DEBUG_RELOAD
2056 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2057 #endif
2059 return (enum reg_class) rv;
2062 /* Implement SECONDARY_MEMORY_NEEDED. */
2064 bool
2065 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2066 enum machine_mode mode ATTRIBUTE_UNUSED)
2068 if (!mep_have_core_copro_moves_p)
2070 if (reg_classes_intersect_p (rclass1, CR_REGS)
2071 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2072 return true;
2073 if (reg_classes_intersect_p (rclass2, CR_REGS)
2074 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2075 return true;
2076 if (!mep_have_copro_copro_moves_p
2077 && reg_classes_intersect_p (rclass1, CR_REGS)
2078 && reg_classes_intersect_p (rclass2, CR_REGS))
2079 return true;
2081 return false;
/* Expander for the reload_in/reload_out patterns.  operands[0] is the
   destination, operands[1] the source, operands[2] the scratch
   register supplied by reload.  Dispatches on a two-digit code where
   the tens digit describes the destination and the units digit the
   source (0 = general, 1 = control register, 2 = far symbol).  */
void
mep_expand_reload (rtx *operands, enum machine_mode mode)
{
  /* There are three cases for each direction:
     register, farsym
     control, farsym
     control, nearsym */

  int s0 = mep_section_tag (operands[0]) == 'f';
  int s1 = mep_section_tag (operands[1]) == 'f';
  int c0 = mep_nongeneral_reg (operands[0]);
  int c1 = mep_nongeneral_reg (operands[1]);
  int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);

#if DEBUG_RELOAD
  fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  switch (which)
    {
    case 00: /* Don't know why this gets here. */
    case 02: /* general = far */
      emit_move_insn (operands[0], operands[1]);
      return;

    case 10: /* cr = mem */
    case 11: /* cr = cr */
    case 01: /* mem = cr */
    case 12: /* cr = far */
      /* Route through the general-register scratch.  */
      emit_move_insn (operands[2], operands[1]);
      emit_move_insn (operands[0], operands[2]);
      return;

    case 20: /* far = general */
      /* Materialize the far address in the scratch, then store.  */
      emit_move_insn (operands[2], XEXP (operands[1], 0));
      emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
      return;

    case 21: /* far = cr */
    case 22: /* far = far */
    default:
      fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
	       which, mode_name[mode]);
      debug_rtx (operands[0]);
      debug_rtx (operands[1]);
      gcc_unreachable ();
    }
}
2135 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2136 can be moved directly into registers 0 to 7, but not into the rest.
2137 If so, and if the required class includes registers 0 to 7, restrict
2138 it to those registers. */
2140 enum reg_class
2141 mep_preferred_reload_class (rtx x, enum reg_class rclass)
2143 switch (GET_CODE (x))
2145 case CONST_INT:
2146 if (INTVAL (x) >= 0x10000
2147 && INTVAL (x) < 0x01000000
2148 && (INTVAL (x) & 0xffff) != 0
2149 && reg_class_subset_p (TPREL_REGS, rclass))
2150 rclass = TPREL_REGS;
2151 break;
2153 case CONST:
2154 case SYMBOL_REF:
2155 case LABEL_REF:
2156 if (mep_section_tag (x) != 'f'
2157 && reg_class_subset_p (TPREL_REGS, rclass))
2158 rclass = TPREL_REGS;
2159 break;
2161 default:
2162 break;
2164 return rclass;
2167 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2168 moves, 4 for direct double-register moves, and 1000 for anything
2169 that requires a temporary register or temporary stack slot. */
2172 mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
2174 if (mep_have_copro_copro_moves_p
2175 && reg_class_subset_p (from, CR_REGS)
2176 && reg_class_subset_p (to, CR_REGS))
2178 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2179 return 4;
2180 return 2;
2182 if (reg_class_subset_p (from, CR_REGS)
2183 && reg_class_subset_p (to, CR_REGS))
2185 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2186 return 8;
2187 return 4;
2189 if (reg_class_subset_p (from, CR_REGS)
2190 || reg_class_subset_p (to, CR_REGS))
2192 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2193 return 4;
2194 return 2;
2196 if (mep_secondary_memory_needed (from, to, mode))
2197 return 1000;
2198 if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
2199 return 1000;
2201 if (GET_MODE_SIZE (mode) > 4)
2202 return 4;
2204 return 2;
2208 /* Functions to save and restore machine-specific function data. */
/* Allocate a zeroed, garbage-collected machine_function record;
   installed as init_machine_status so each function gets one.  */
static struct machine_function *
mep_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
/* Implement TARGET_ALLOCATE_INITIAL_VALUE.  Give hard register REG a
   stack slot (addressed relative to the arg pointer) in which its
   incoming value can be saved; return NULL_RTX when no slot should be
   allocated.  Slots are handed out lazily and recorded in
   cfun->machine->reg_save_slot.  */
static rtx
mep_allocate_initial_value (rtx reg)
{
  int rss;

  if (GET_CODE (reg) != REG)
    return NULL_RTX;

  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    return NULL_RTX;

  /* In interrupt functions, the "initial" values of $gp and $tp are
     provided by the prologue.  They are not necessarily the same as
     the values that the caller was using.  */
  if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
    if (mep_interrupt_p ())
      return NULL_RTX;

  if (! cfun->machine->reg_save_slot[REGNO(reg)])
    {
      /* First request for this register: grow the save area.  Slot
	 values are offsets, so 0 doubles as "unallocated".  */
      cfun->machine->reg_save_size += 4;
      cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
    }

  rss = cfun->machine->reg_save_slot[REGNO(reg)];
  return gen_rtx_MEM (SImode, plus_constant (Pmode, arg_pointer_rtx, -rss));
}
2245 mep_return_addr_rtx (int count)
2247 if (count != 0)
2248 return const0_rtx;
2250 return get_hard_reg_initial_val (Pmode, LP_REGNO);
/* RTX for the incoming value of $tp, via the hard-reg-initial-value
   mechanism (forces the prologue to preserve it).  */
static rtx
mep_tp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, TP_REGNO);
}
/* RTX for the incoming value of $gp, via the hard-reg-initial-value
   mechanism (forces the prologue to preserve it).  */
static rtx
mep_gp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, GP_REGNO);
}
/* True if the current function has the "interrupt" attribute.  The
   answer is cached in cfun->machine->interrupt_handler:
   0 = not yet computed, 1 = no, 2 = yes.  */
static bool
mep_interrupt_p (void)
{
  if (cfun->machine->interrupt_handler == 0)
    {
      int interrupt_handler
	= (lookup_attribute ("interrupt",
			     DECL_ATTRIBUTES (current_function_decl))
	   != NULL_TREE);
      cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
    }
  return cfun->machine->interrupt_handler == 2;
}
/* True if the current function has the "disinterrupt" attribute.  The
   answer is cached in cfun->machine->disable_interrupts using the same
   0 = unknown / 1 = no / 2 = yes encoding as mep_interrupt_p.  */
static bool
mep_disinterrupt_p (void)
{
  if (cfun->machine->disable_interrupts == 0)
    {
      int disable_interrupts
	= (lookup_attribute ("disinterrupt",
			     DECL_ATTRIBUTES (current_function_decl))
	   != NULL_TREE);
      cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
    }
  return cfun->machine->disable_interrupts == 2;
}
2294 /* Frame/Epilog/Prolog Related. */
/* Return true if INSN (an insn or a pattern) sets REG.  */
static bool
mep_reg_set_p (rtx reg, rtx insn)
{
  /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
  if (INSN_P (insn))
    {
      if (FIND_REG_INC_NOTE (insn, reg))
	return true;
      insn = PATTERN (insn);
    }

  /* A register copied onto itself does not count as a set.  */
  if (GET_CODE (insn) == SET
      && GET_CODE (XEXP (insn, 0)) == REG
      && GET_CODE (XEXP (insn, 1)) == REG
      && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
    return false;

  return set_of (reg, insn) != NULL_RTX;
}
2317 #define MEP_SAVES_UNKNOWN 0
2318 #define MEP_SAVES_YES 1
2319 #define MEP_SAVES_MAYBE 2
2320 #define MEP_SAVES_NO 3
/* Return true if hard register REGNO is (or must be assumed to be)
   modified anywhere in the current function's insn stream.  */
static bool
mep_reg_set_in_function (int regno)
{
  rtx reg, insn;

  /* In an interrupt handler, any live register may be clobbered.  */
  if (mep_interrupt_p () && df_regs_ever_live_p(regno))
    return true;

  /* The profiling call implicitly clobbers $lp.  */
  if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
    return true;

  push_topmost_sequence ();
  insn = get_insns ();
  pop_topmost_sequence ();

  if (!insn)
    return false;

  reg = gen_rtx_REG (SImode, regno);

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && mep_reg_set_p (reg, insn))
      return true;
  return false;
}
/* Return true if the function contains a basic (operand-less) asm
   statement; such asms can clobber anything.  The answer is cached in
   cfun->machine->asms_without_operands (0 = unknown, 1 = no, 2 = yes).  */
static bool
mep_asm_without_operands_p (void)
{
  if (cfun->machine->asms_without_operands == 0)
    {
      rtx insn;

      push_topmost_sequence ();
      insn = get_insns ();
      pop_topmost_sequence ();

      cfun->machine->asms_without_operands = 1;
      while (insn)
	{
	  /* A basic asm shows up as a bare ASM_INPUT pattern.  */
	  if (INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == ASM_INPUT)
	    {
	      cfun->machine->asms_without_operands = 2;
	      break;
	    }
	  insn = NEXT_INSN (insn);
	}
    }
  return cfun->machine->asms_without_operands == 2;
}
2375 /* Interrupt functions save/restore every call-preserved register, and
2376 any call-used register it uses (or all if it calls any function,
2377 since they may get clobbered there too). Here we check to see
2378 which call-used registers need saving. */
2380 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2381 && (r == FIRST_CCR_REGNO + 1 \
2382 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2383 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
/* Return true if register R must be saved/restored by an interrupt
   handler's prologue/epilogue.  Always false outside interrupt
   functions.  */
static bool
mep_interrupt_saved_reg (int r)
{
  if (!mep_interrupt_p ())
    return false;
  /* The control-register shuffling temporaries are always clobbered.  */
  if (r == REGSAVE_CONTROL_TEMP
      || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
    return true;
  /* A basic asm may touch anything; save everything plausible.  */
  if (mep_asm_without_operands_p ()
      && (!fixed_regs[r]
	  || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
	  || IVC2_ISAVED_REG (r)))
    return true;
  if (!crtl->is_leaf)
    /* Function calls mean we need to save $lp. */
    if (r == LP_REGNO || IVC2_ISAVED_REG (r))
      return true;
  if (!crtl->is_leaf || cfun->machine->doloop_tags > 0)
    /* The interrupt handler might use these registers for repeat blocks,
       or it might call a function that does so. */
    if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
      return true;
  /* In a leaf handler, an unused call-clobbered register is safe.  */
  if (crtl->is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
    return false;
  /* Functions we call might clobber these. */
  if (call_used_regs[r] && !fixed_regs[r])
    return true;
  /* Additional registers that need to be saved for IVC2. */
  if (IVC2_ISAVED_REG (r))
    return true;

  return false;
}
/* Return true if the current function must save register R.  The
   per-register answers are recomputed until the frame layout is
   locked, then served from cfun->machine->reg_saved (MEP_SAVES_*).  */
static bool
mep_call_saves_register (int r)
{
  if (! cfun->machine->frame_locked)
    {
      int rv = MEP_SAVES_NO;

      if (cfun->machine->reg_save_slot[r])
	rv = MEP_SAVES_YES;
      else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
	/* Profiling clobbers $lp; see mep_epilogue_uses.  */
	rv = MEP_SAVES_YES;
      else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
	rv = MEP_SAVES_YES;
      else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
	rv = MEP_SAVES_YES;
      else if (crtl->calls_eh_return && (r == 10 || r == 11))
	/* We need these to have stack slots so that they can be set during
	   unwinding. */
	rv = MEP_SAVES_YES;
      else if (mep_interrupt_saved_reg (r))
	rv = MEP_SAVES_YES;
      cfun->machine->reg_saved[r] = rv;
    }
  return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
}
2445 /* Return true if epilogue uses register REGNO. */
2447 bool
2448 mep_epilogue_uses (int regno)
2450 /* Since $lp is a call-saved register, the generic code will normally
2451 mark it used in the epilogue if it needs to be saved and restored.
2452 However, when profiling is enabled, the profiling code will implicitly
2453 clobber $11. This case has to be handled specially both here and in
2454 mep_call_saves_register. */
2455 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2456 return true;
2457 /* Interrupt functions save/restore pretty much everything. */
2458 return (reload_completed && mep_interrupt_saved_reg (regno));
2461 static int
2462 mep_reg_size (int regno)
2464 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2465 return 8;
2466 return 4;
2469 /* Worker function for TARGET_CAN_ELIMINATE. */
2471 bool
2472 mep_can_eliminate (const int from, const int to)
2474 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2475 ? ! frame_pointer_needed
2476 : true);
/* Worker for INITIAL_ELIMINATION_OFFSET.  Returns the distance between
   register FROM and register TO for the current frame layout.  As a
   side effect (until the frame is locked) this recomputes the cached
   reg_saved[] array and the two 8-byte alignment fillers,
   regsave_filler and frame_filler.  */
2480 mep_elimination_offset (int from, int to)
2482 int reg_save_size;
2483 int i;
2484 int frame_size = get_frame_size () + crtl->outgoing_args_size;
2485 int total_size;
2487 if (!cfun->machine->frame_locked)
2488 memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));
2490 /* We don't count arg_regs_to_save in the arg pointer offset, because
2491 gcc thinks the arg pointer has moved along with the saved regs.
2492 However, we do count it when we adjust $sp in the prologue. */
2493 reg_save_size = 0;
2494 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2495 if (mep_call_saves_register (i))
2496 reg_save_size += mep_reg_size (i);
/* Pad the register-save area to an 8-byte boundary.  */
2498 if (reg_save_size % 8)
2499 cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
2500 else
2501 cfun->machine->regsave_filler = 0;
2503 /* This is what our total stack adjustment looks like. */
2504 total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);
2506 if (total_size % 8)
2507 cfun->machine->frame_filler = 8 - (total_size % 8);
2508 else
2509 cfun->machine->frame_filler = 0;
2512 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
2513 return reg_save_size + cfun->machine->regsave_filler;
2515 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2516 return cfun->machine->frame_filler + frame_size;
2518 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2519 return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;
2521 gcc_unreachable ();
/* Mark X frame-related (for DWARF CFI generation) and return it.
   Terse on purpose; used pervasively in the prologue expanders.  */
2524 static rtx
2525 F (rtx x)
2527 RTX_FRAME_RELATED_P (x) = 1;
2528 return x;
2531 /* Since the prologue/epilogue code is generated after optimization,
2532 we can't rely on gcc to split constants for us. So, this code
2533 captures all the ways to add a constant to a register in one logic
2534 chunk, including optimizing away insns we just don't need. This
2535 makes the prolog/epilog code easier to follow. */
/* Emit DEST = SRC + VALUE, choosing the cheapest legal sequence:
   nothing at all, a plain move, a single add3 for 16-bit-signed
   constants, or a movh/or/add3 sequence through REGSAVE_CONTROL_TEMP
   for larger constants.  If MARK_FRAME is nonzero the emitted insns
   are marked frame-related (with a REG_FRAME_RELATED_EXPR note in the
   multi-insn case so the unwinder sees a single net adjustment).  */
2536 static void
2537 add_constant (int dest, int src, int value, int mark_frame)
2539 rtx insn;
2540 int hi, lo;
2542 if (src == dest && value == 0)
2543 return;
2545 if (value == 0)
2547 insn = emit_move_insn (gen_rtx_REG (SImode, dest),
2548 gen_rtx_REG (SImode, src));
2549 if (mark_frame)
2550 RTX_FRAME_RELATED_P(insn) = 1;
2551 return;
2554 if (value >= -32768 && value <= 32767)
2556 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2557 gen_rtx_REG (SImode, src),
2558 GEN_INT (value)));
2559 if (mark_frame)
2560 RTX_FRAME_RELATED_P(insn) = 1;
2561 return;
2564 /* Big constant, need to use a temp register. We use
2565 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2566 area is always small enough to directly add to). */
2568 hi = trunc_int_for_mode (value & 0xffff0000, SImode);
2569 lo = value & 0xffff;
2571 insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2572 GEN_INT (hi));
2574 if (lo)
2576 insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2577 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2578 GEN_INT (lo)));
2581 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2582 gen_rtx_REG (SImode, src),
2583 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
2584 if (mark_frame)
2586 RTX_FRAME_RELATED_P(insn) = 1;
/* Record the net effect (dest = dest + value) so the CFI machinery
   does not have to interpret the temp-register sequence.  */
2587 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2588 gen_rtx_SET (SImode,
2589 gen_rtx_REG (SImode, dest),
2590 gen_rtx_PLUS (SImode,
2591 gen_rtx_REG (SImode, dest),
2592 GEN_INT (value))));
2596 /* Move SRC to DEST. Mark the move as being potentially dead if
2597 MAYBE_DEAD_P. */
/* Emit a move of SRC to DEST and return the insn.  MAYBE_DEAD_P is
   currently unused: the REG_MAYBE_DEAD annotation is compiled out
   (see the #if 0 below), so this is just emit_move_insn.  */
2599 static rtx
2600 maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2602 rtx insn = emit_move_insn (dest, src);
2603 #if 0
2604 if (maybe_dead_p)
2605 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2606 #endif
2607 return insn;
2610 /* Used for interrupt functions, which can't assume that $tp and $gp
2611 contain the correct pointers. */
/* Reload REGNO with the address of SYMBOL using a movh/add pair.
   Skipped when the register is never live in a leaf function.  Used
   for interrupt functions, which can't assume $tp/$gp are valid.  */
2613 static void
2614 mep_reload_pointer (int regno, const char *symbol)
2616 rtx reg, sym;
2618 if (!df_regs_ever_live_p(regno) && crtl->is_leaf)
2619 return;
2621 reg = gen_rtx_REG (SImode, regno);
2622 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2623 emit_insn (gen_movsi_topsym_s (reg, sym));
2624 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2627 /* Assign save slots for any register not already saved. DImode
2628 registers go at the end of the reg save area; the rest go at the
2629 beginning. This is for alignment purposes. Returns true if a frame
2630 is really needed. */
2631 static bool
2632 mep_assign_save_slots (int reg_save_size)
2634 bool really_need_stack_frame = false;
2635 int di_ofs = 0;
2636 int i;
2638 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2639 if (mep_call_saves_register(i))
2641 int regsize = mep_reg_size (i);
/* $tp/$gp/$lp only force a real frame if the function actually
   modifies them.  */
2643 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2644 || mep_reg_set_in_function (i))
2645 really_need_stack_frame = true;
2647 if (cfun->machine->reg_save_slot[i])
2648 continue;
/* 4-byte slots are allocated from the bottom up; 8-byte (DImode)
   slots from the top of the save area down, for alignment.  */
2650 if (regsize < 8)
2652 cfun->machine->reg_save_size += regsize;
2653 cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
2655 else
2657 cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
2658 di_ofs += 8;
/* From here on the layout must not change.  */
2661 cfun->machine->frame_locked = 1;
2662 return really_need_stack_frame;
/* Expand the function prologue: adjust $sp, store every register that
   mep_call_saves_register approves into its assigned slot, set up the
   frame pointer if needed, and for interrupt handlers reload $gp/$tp.
   The initial $sp adjustment covers the whole frame when it fits in a
   short (<128) offset, otherwise just the register-save area with a
   second adjustment at the end.  */
2665 void
2666 mep_expand_prologue (void)
2668 int i, rss, sp_offset = 0;
2669 int reg_save_size;
2670 int frame_size;
2671 int really_need_stack_frame;
2673 /* We must not allow register renaming in interrupt functions,
2674 because that invalidates the correctness of the set of call-used
2675 registers we're going to save/restore. */
2676 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2678 if (mep_disinterrupt_p ())
2679 emit_insn (gen_mep_disable_int ());
2681 cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;
2683 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2684 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2685 really_need_stack_frame = frame_size;
2687 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
2689 sp_offset = reg_save_size;
2690 if (sp_offset + frame_size < 128)
2691 sp_offset += frame_size ;
2693 add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);
2695 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2696 if (mep_call_saves_register(i))
2698 rtx mem;
2699 bool maybe_dead_p;
2700 enum machine_mode rmode;
2702 rss = cfun->machine->reg_save_slot[i];
2704 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2705 && (!mep_reg_set_in_function (i)
2706 && !mep_interrupt_p ()))
2707 continue;
2709 if (mep_reg_size (i) == 8)
2710 rmode = DImode;
2711 else
2712 rmode = SImode;
2714 /* If there is a pseudo associated with this register's initial value,
2715 reload might have already spilt it to the stack slot suggested by
2716 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2717 deleted as dead. */
2718 mem = gen_rtx_MEM (rmode,
2719 plus_constant (Pmode, stack_pointer_rtx,
2720 sp_offset - rss));
2721 maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));
2723 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2724 F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
2725 else if (rmode == DImode)
/* 64-bit registers that can't be stored directly: split into two
   SImode stores through the control temps, honoring endianness.  */
2727 rtx insn;
2728 int be = TARGET_BIG_ENDIAN ? 4 : 0;
2730 mem = gen_rtx_MEM (SImode,
2731 plus_constant (Pmode, stack_pointer_rtx,
2732 sp_offset - rss + be));
2734 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2735 gen_rtx_REG (SImode, i),
2736 maybe_dead_p);
2737 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2738 gen_rtx_ZERO_EXTRACT (SImode,
2739 gen_rtx_REG (DImode, i),
2740 GEN_INT (32),
2741 GEN_INT (32)),
2742 maybe_dead_p);
2743 insn = maybe_dead_move (mem,
2744 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2745 maybe_dead_p);
2746 RTX_FRAME_RELATED_P (insn) = 1;
2748 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2749 gen_rtx_SET (VOIDmode,
2750 copy_rtx (mem),
2751 gen_rtx_REG (rmode, i)));
2752 mem = gen_rtx_MEM (SImode,
2753 plus_constant (Pmode, stack_pointer_rtx,
2754 sp_offset - rss + (4-be)));
2755 insn = maybe_dead_move (mem,
2756 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2757 maybe_dead_p);
2759 else
/* Other non-loadable registers: copy through a temp, then store.  */
2761 rtx insn;
2762 maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2763 gen_rtx_REG (rmode, i),
2764 maybe_dead_p);
2765 insn = maybe_dead_move (mem,
2766 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2767 maybe_dead_p);
2768 RTX_FRAME_RELATED_P (insn) = 1;
2770 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2771 gen_rtx_SET (VOIDmode,
2772 copy_rtx (mem),
2773 gen_rtx_REG (rmode, i)));
2777 if (frame_pointer_needed)
2779 /* We've already adjusted down by sp_offset. Total $sp change
2780 is reg_save_size + frame_size. We want a net change here of
2781 just reg_save_size. */
2782 add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
2785 add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);
2787 if (mep_interrupt_p ())
2789 mep_reload_pointer(GP_REGNO, "__sdabase");
2790 mep_reload_pointer(TP_REGNO, "__tpbase");
/* Emit a human-readable frame map as assembly comments at the start of
   the function (full map unless debug info is disabled, in which case
   a one-line summary).  Also picks the printed name for register 8
   ($fp vs $8) depending on whether a frame pointer is in use.
   HWI_LOCAL is the size of the local variable area.  */
2794 static void
2795 mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2797 int local = hwi_local;
2798 int frame_size = local + crtl->outgoing_args_size;
2799 int reg_save_size;
2800 int ffill;
2801 int i, sp, skip;
2802 int sp_offset;
2803 int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
2805 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2806 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2807 sp_offset = reg_save_size + frame_size;
2809 ffill = cfun->machine->frame_filler;
2811 if (cfun->machine->mep_frame_pointer_needed)
2812 reg_names[FP_REGNO] = "$fp";
2813 else
2814 reg_names[FP_REGNO] = "$8";
2816 if (sp_offset == 0)
2817 return;
2819 if (debug_info_level == DINFO_LEVEL_NONE)
2821 fprintf (file, "\t# frame: %d", sp_offset);
2822 if (reg_save_size)
2823 fprintf (file, " %d regs", reg_save_size);
2824 if (local)
2825 fprintf (file, " %d locals", local);
2826 if (crtl->outgoing_args_size)
2827 fprintf (file, " %d args", crtl->outgoing_args_size);
2828 fprintf (file, "\n");
2829 return;
2832 fprintf (file, "\t#\n");
2833 fprintf (file, "\t# Initial Frame Information:\n");
2834 if (sp_offset || !frame_pointer_needed)
2835 fprintf (file, "\t# Entry ---------- 0\n");
2837 /* Sort registers by save slots, so they're printed in the order
2838 they appear in memory, not the order they're saved in. */
2839 for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
2840 slot_map[si] = si;
/* Simple selection sort; FIRST_PSEUDO_REGISTER is small and this only
   runs when emitting the commented frame map.  */
2841 for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
2842 for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
2843 if (cfun->machine->reg_save_slot[slot_map[si]]
2844 > cfun->machine->reg_save_slot[slot_map[sj]])
2846 int t = slot_map[si];
2847 slot_map[si] = slot_map[sj];
2848 slot_map[sj] = t;
2851 sp = 0;
2852 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2854 int rsize;
2855 int r = slot_map[i];
2856 int rss = cfun->machine->reg_save_slot[r];
2858 if (!mep_call_saves_register (r))
2859 continue;
2861 if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
2862 && (!mep_reg_set_in_function (r)
2863 && !mep_interrupt_p ()))
2864 continue;
2866 rsize = mep_reg_size(r);
2867 skip = rss - (sp+rsize);
2868 if (skip)
2869 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2870 fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2871 rsize, reg_names[r], sp_offset - rss);
2872 sp = rss;
2875 skip = reg_save_size - sp;
2876 if (skip)
2877 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2879 if (frame_pointer_needed)
2880 fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
2881 if (local)
2882 fprintf (file, "\t# %3d bytes for local vars\n", local);
2883 if (ffill)
2884 fprintf (file, "\t# %3d bytes for alignment\n", ffill);
2885 if (crtl->outgoing_args_size)
2886 fprintf (file, "\t# %3d bytes for outgoing args\n",
2887 crtl->outgoing_args_size);
2888 fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
2889 fprintf (file, "\t#\n");
/* Nonzero while expanding an eh_return epilogue, which must not
   restore $lp (the EH machinery supplies the return address).  */
2893 static int mep_prevent_lp_restore = 0;
/* Nonzero while expanding a sibcall epilogue, which must not emit the
   final return jump.  */
2894 static int mep_sibcall_epilogue = 0;
/* Expand the function epilogue: rebase $sp (from $fp when a frame
   pointer exists, since alloca may have moved $sp), restore saved
   registers in reverse order, pop the frame, and emit the appropriate
   return (eh_return jump, reti for interrupt handlers, or an indirect
   return through the register holding $lp).  Behavior is modulated by
   the mep_prevent_lp_restore / mep_sibcall_epilogue flags above.  */
2896 void
2897 mep_expand_epilogue (void)
2899 int i, sp_offset = 0;
2900 int reg_save_size = 0;
2901 int frame_size;
2902 int lp_temp = LP_REGNO, lp_slot = -1;
2903 int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
2904 int interrupt_handler = mep_interrupt_p ();
2906 if (profile_arc_flag == 2)
2907 emit_insn (gen_mep_bb_trace_ret ());
2909 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2910 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2912 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
2914 if (frame_pointer_needed)
2916 /* If we have a frame pointer, we won't have a reliable stack
2917 pointer (alloca, you know), so rebase SP from FP */
2918 emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
2919 gen_rtx_REG (SImode, FP_REGNO));
2920 sp_offset = reg_save_size;
2922 else
2924 /* SP is right under our local variable space. Adjust it if
2925 needed. */
2926 sp_offset = reg_save_size + frame_size;
2927 if (sp_offset >= 128)
2929 add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
2930 sp_offset -= frame_size;
2934 /* This is backwards so that we restore the control and coprocessor
2935 registers before the temporary registers we use to restore
2936 them. */
2937 for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
2938 if (mep_call_saves_register (i))
2940 enum machine_mode rmode;
2941 int rss = cfun->machine->reg_save_slot[i];
2943 if (mep_reg_size (i) == 8)
2944 rmode = DImode;
2945 else
2946 rmode = SImode;
2948 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2949 && !(mep_reg_set_in_function (i) || interrupt_handler))
2950 continue;
2951 if (mep_prevent_lp_restore && i == LP_REGNO)
2952 continue;
/* $10/$11 carry the eh_return values; don't clobber them.  */
2953 if (!mep_prevent_lp_restore
2954 && !interrupt_handler
2955 && (i == 10 || i == 11))
2956 continue;
2958 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2959 emit_move_insn (gen_rtx_REG (rmode, i),
2960 gen_rtx_MEM (rmode,
2961 plus_constant (Pmode, stack_pointer_rtx,
2962 sp_offset - rss)));
2963 else
2965 if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
2966 /* Defer this one so we can jump indirect rather than
2967 copying the RA to $lp and "ret". EH epilogues
2968 automatically skip this anyway. */
2969 lp_slot = sp_offset-rss;
2970 else
2972 emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2973 gen_rtx_MEM (rmode,
2974 plus_constant (Pmode,
2975 stack_pointer_rtx,
2976 sp_offset-rss)));
2977 emit_move_insn (gen_rtx_REG (rmode, i),
2978 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
2982 if (lp_slot != -1)
2984 /* Restore this one last so we know it will be in the temp
2985 register when we return by jumping indirectly via the temp. */
2986 emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2987 gen_rtx_MEM (SImode,
2988 plus_constant (Pmode, stack_pointer_rtx,
2989 lp_slot)));
2990 lp_temp = REGSAVE_CONTROL_TEMP;
2994 add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);
2996 if (crtl->calls_eh_return && mep_prevent_lp_restore)
2997 emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
2998 gen_rtx_REG (SImode, SP_REGNO),
2999 cfun->machine->eh_stack_adjust));
3001 if (mep_sibcall_epilogue)
3002 return;
3004 if (mep_disinterrupt_p ())
3005 emit_insn (gen_mep_enable_int ());
3007 if (mep_prevent_lp_restore)
3009 emit_jump_insn (gen_eh_return_internal ());
3010 emit_barrier ();
3012 else if (interrupt_handler)
3013 emit_jump_insn (gen_mep_reti ());
3014 else
3015 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
/* Expand the eh_return pattern: force the handler address into $lp
   (copying from OPERANDS[0] if it isn't there already), then emit the
   EH epilogue.  */
3018 void
3019 mep_expand_eh_return (rtx *operands)
3021 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3023 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3024 emit_move_insn (ra, operands[0]);
3025 operands[0] = ra;
3028 emit_insn (gen_eh_epilogue (operands[0]));
/* Expand the eh_epilogue pattern: run the normal epilogue with
   mep_prevent_lp_restore set so $lp is left alone, recording the
   EH stack adjustment register ($0) first.  */
3031 void
3032 mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
3034 cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
3035 mep_prevent_lp_restore = 1;
3036 mep_expand_epilogue ();
3037 mep_prevent_lp_restore = 0;
/* Expand the epilogue for a sibling call: same as the normal epilogue
   but mep_sibcall_epilogue suppresses the final return jump.  */
3040 void
3041 mep_expand_sibcall_epilogue (void)
3043 mep_sibcall_epilogue = 1;
3044 mep_expand_epilogue ();
3045 mep_sibcall_epilogue = 0;
/* Worker for TARGET_FUNCTION_OK_FOR_SIBCALL.  Only direct calls to
   non-'f'-section functions qualify, and never from interrupt or
   disinterrupt functions.  */
3048 static bool
3049 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3051 if (decl == NULL)
3052 return false;
3054 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3055 return false;
3057 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3058 if (mep_interrupt_p () || mep_disinterrupt_p ())
3059 return false;
3061 return true;
/* EH_RETURN_STACKADJ_RTX: the stack adjustment for eh_return lives in
   register 10.  */
3065 mep_return_stackadj_rtx (void)
3067 return gen_rtx_REG (SImode, 10);
/* EH_RETURN_HANDLER_RTX: the exception handler address goes in $lp.  */
3071 mep_return_handler_rtx (void)
3073 return gen_rtx_REG (SImode, LP_REGNO);
/* Emit the profiling stub: save $0 and $lp on a temporary 8-byte stack
   area, call __mep_mcount, then restore them.  Emitted as literal
   assembly at the very start of the function.  */
3076 void
3077 mep_function_profiler (FILE *file)
3079 /* Always right at the beginning of the function. */
3080 fprintf (file, "\t# mep function profiler\n");
3081 fprintf (file, "\tadd\t$sp, -8\n");
3082 fprintf (file, "\tsw\t$0, ($sp)\n");
3083 fprintf (file, "\tldc\t$0, $lp\n");
3084 fprintf (file, "\tsw\t$0, 4($sp)\n");
3085 fprintf (file, "\tbsr\t__mep_mcount\n");
3086 fprintf (file, "\tlw\t$0, 4($sp)\n");
3087 fprintf (file, "\tstc\t$0, $lp\n");
3088 fprintf (file, "\tlw\t$0, ($sp)\n");
3089 fprintf (file, "\tadd\t$sp, 8\n\n");
/* Output the basic-block-profiling return hook: same save/call/restore
   shape as mep_function_profiler but calling __bb_trace_ret.  Returns
   "" so it can be used directly as an insn output template.  */
3092 const char *
3093 mep_emit_bb_trace_ret (void)
3095 fprintf (asm_out_file, "\t# end of block profiling\n");
3096 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3097 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3098 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3099 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3100 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3101 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3102 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3103 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3104 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3105 return "";
3108 #undef SAVE
3109 #undef RESTORE
3111 /* Operand Printing. */
/* Print ADDRESS (either a MEM whose address is taken, or a bare REG)
   to STREAM by delegating to mep_print_operand with no modifier.  */
3113 void
3114 mep_print_operand_address (FILE *stream, rtx address)
3116 if (GET_CODE (address) == MEM)
3117 address = XEXP (address, 0);
3118 else
3119 /* cf: gcc.dg/asm-4.c. */
3120 gcc_assert (GET_CODE (address) == REG);
3122 mep_print_operand (stream, address, 0);
/* Operand-printing table used by mep_print_operand.  Each entry maps a
   (modifier code, encoded operand shape) pair to an output template.
   `pattern' is matched against the string produced by encode_pattern
   ('r' = reg, 'i' = int, 's' = symbol, 'm' = mem, '+' = plus, etc.);
   `format' is emitted literally except that a digit N prints
   sub-operand patternr[N] and '\\' escapes the next character.  A
   code of 0 matches when no modifier was given.  */
3125 static struct
3127 char code;
3128 const char *pattern;
3129 const char *format;
3131 const conversions[] =
3133 { 0, "r", "0" },
3134 { 0, "m+ri", "3(2)" },
3135 { 0, "mr", "(1)" },
3136 { 0, "ms", "(1)" },
3137 { 0, "ml", "(1)" },
3138 { 0, "mLrs", "%lo(3)(2)" },
3139 { 0, "mLr+si", "%lo(4+5)(2)" },
3140 { 0, "m+ru2s", "%tpoff(5)(2)" },
3141 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3142 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3143 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3144 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3145 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3146 { 0, "mi", "(1)" },
3147 { 0, "m+si", "(2+3)" },
3148 { 0, "m+li", "(2+3)" },
3149 { 0, "i", "0" },
3150 { 0, "s", "0" },
3151 { 0, "+si", "1+2" },
3152 { 0, "+u2si", "%tpoff(3+4)" },
3153 { 0, "+u3si", "%sdaoff(3+4)" },
3154 { 0, "l", "0" },
3155 { 'b', "i", "0" },
3156 { 'B', "i", "0" },
3157 { 'U', "i", "0" },
3158 { 'h', "i", "0" },
3159 { 'h', "Hs", "%hi(1)" },
3160 { 'I', "i", "0" },
3161 { 'I', "u2s", "%tpoff(2)" },
3162 { 'I', "u3s", "%sdaoff(2)" },
3163 { 'I', "+u2si", "%tpoff(3+4)" },
3164 { 'I', "+u3si", "%sdaoff(3+4)" },
3165 { 'J', "i", "0" },
3166 { 'P', "mr", "(1\\+),\\0" },
3167 { 'x', "i", "0" },
3168 { 0, 0, 0 }
3171 static int
3172 unique_bit_in (HOST_WIDE_INT i)
3174 switch (i & 0xff)
3176 case 0x01: case 0xfe: return 0;
3177 case 0x02: case 0xfd: return 1;
3178 case 0x04: case 0xfb: return 2;
3179 case 0x08: case 0xf7: return 3;
3180 case 0x10: case 0x7f: return 4;
3181 case 0x20: case 0xbf: return 5;
3182 case 0x40: case 0xdf: return 6;
3183 case 0x80: case 0xef: return 7;
3184 default:
3185 gcc_unreachable ();
3189 static int
3190 bit_size_for_clip (HOST_WIDE_INT i)
3192 int rv;
3194 for (rv = 0; rv < 31; rv ++)
3195 if (((HOST_WIDE_INT) 1 << rv) > i)
3196 return rv + 1;
3197 gcc_unreachable ();
3200 /* Print an operand to a assembler instruction. */
/* Print operand X to FILE, honoring modifier CODE.  '<', 'L' and 'M'
   are handled specially; everything else is dispatched through the
   conversions[] table above, keyed on the operand shape produced by
   encode_pattern.  */
3202 void
3203 mep_print_operand (FILE *file, rtx x, int code)
3205 int i, j;
3206 const char *real_name;
3208 if (code == '<')
3210 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3211 we're using, then skip over the "mep_" part of its name. */
3212 const struct cgen_insn *insn;
3214 if (mep_get_move_insn (mep_cmov, &insn))
3215 fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
3216 else
3217 mep_intrinsic_unavailable (mep_cmov);
3218 return;
/* %L: print the bit-op suffix (clr/set/not) for a logic rtx.  */
3220 if (code == 'L')
3222 switch (GET_CODE (x))
3224 case AND:
3225 fputs ("clr", file);
3226 return;
3227 case IOR:
3228 fputs ("set", file);
3229 return;
3230 case XOR:
3231 fputs ("not", file);
3232 return;
3233 default:
3234 output_operand_lossage ("invalid %%L code");
3237 if (code == 'M')
3239 /* Print the second operand of a CR <- CR move. If we're using
3240 a two-operand instruction (i.e., a real cmov), then just print
3241 the operand normally. If we're using a "reg, reg, immediate"
3242 instruction such as caddi3, print the operand followed by a
3243 zero field. If we're using a three-register instruction,
3244 print the operand twice. */
3245 const struct cgen_insn *insn;
3247 mep_print_operand (file, x, 0);
3248 if (mep_get_move_insn (mep_cmov, &insn)
3249 && insn_data[insn->icode].n_operands == 3)
3251 fputs (", ", file);
3252 if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
3253 mep_print_operand (file, x, 0);
3254 else
3255 mep_print_operand (file, const0_rtx, 0);
3257 return;
/* Table-driven path: encode X's shape, find a matching entry, then
   interpret its format string.  */
3260 encode_pattern (x);
3261 for (i = 0; conversions[i].pattern; i++)
3262 if (conversions[i].code == code
3263 && strcmp(conversions[i].pattern, pattern) == 0)
3265 for (j = 0; conversions[i].format[j]; j++)
3266 if (conversions[i].format[j] == '\\')
3268 fputc (conversions[i].format[j+1], file);
3269 j++;
3271 else if (ISDIGIT(conversions[i].format[j]))
3273 rtx r = patternr[conversions[i].format[j] - '0'];
3274 switch (GET_CODE (r))
3276 case REG:
3277 fprintf (file, "%s", reg_names [REGNO (r)]);
3278 break;
3279 case CONST_INT:
3280 switch (code)
3282 case 'b':
3283 fprintf (file, "%d", unique_bit_in (INTVAL (r)));
3284 break;
3285 case 'B':
3286 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
3287 break;
3288 case 'h':
3289 fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
3290 break;
3291 case 'U':
3292 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
3293 break;
3294 case 'J':
3295 fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
3296 break;
3297 case 'x':
3298 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3299 && !(INTVAL (r) & 0xff))
3300 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
3301 else
3302 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3303 break;
3304 case 'I':
3305 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3306 && conversions[i].format[j+1] == 0)
3308 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
3309 fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
3311 else
3312 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3313 break;
3314 default:
3315 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3316 break;
3318 break;
3319 case CONST_DOUBLE:
3320 fprintf(file, "[const_double 0x%lx]",
3321 (unsigned long) CONST_DOUBLE_HIGH(r));
3322 break;
3323 case SYMBOL_REF:
3324 real_name = targetm.strip_name_encoding (XSTR (r, 0));
3325 assemble_name (file, real_name);
3326 break;
3327 case LABEL_REF:
3328 output_asm_label (r);
3329 break;
3330 default:
3331 fprintf (stderr, "don't know how to print this operand:");
3332 debug_rtx (r);
3333 gcc_unreachable ();
3336 else
/* Suppress a '+' that would precede a negative constant (the
   constant prints its own sign).  */
3338 if (conversions[i].format[j] == '+'
3339 && (!code || code == 'I')
3340 && ISDIGIT (conversions[i].format[j+1])
3341 && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
3342 && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
3343 continue;
3344 fputc(conversions[i].format[j], file);
3346 break;
3348 if (!conversions[i].pattern)
3350 error ("unconvertible operand %c %qs", code?code:'-', pattern);
3351 debug_rtx(x);
3354 return;
/* FINAL_PRESCAN_INSN hook: emit a '+' bundle marker before VLIW-slot
   insns (mode BImode, non-core slot).  */
3357 void
3358 mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
3359 int noperands ATTRIBUTE_UNUSED)
3361 /* Despite the fact that MeP is perfectly capable of branching and
3362 doing something else in the same bundle, gcc does jump
3363 optimization *after* scheduling, so we cannot trust the bundling
3364 flags on jump instructions. */
3365 if (GET_MODE (insn) == BImode
3366 && get_attr_slots (insn) != SLOTS_CORE)
3367 fputc ('+', asm_out_file);
3370 /* Function args in registers. */
/* TARGET_SETUP_INCOMING_VARARGS: record how many of the four argument
   registers ($1..$4) still need saving for a varargs function, and
   report the pretend-args size (4 bytes each) to the caller.  */
3372 static void
3373 mep_setup_incoming_varargs (cumulative_args_t cum,
3374 enum machine_mode mode ATTRIBUTE_UNUSED,
3375 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3376 int second_time ATTRIBUTE_UNUSED)
3378 int nsave = 4 - (get_cumulative_args (cum)->nregs + 1);
3380 if (nsave > 0)
3381 cfun->machine->arg_regs_to_save = nsave;
3382 *pretend_size = nsave * 4;
3385 static int
3386 bytesize (const_tree type, enum machine_mode mode)
3388 if (mode == BLKmode)
3389 return int_size_in_bytes (type);
3390 return GET_MODE_SIZE (mode);
/* TARGET_EXPAND_BUILTIN_SAVEREGS: spill the unconsumed argument
   registers to a stack buffer and return its address.  For IVC2 the
   buffer also holds the 64-bit coprocessor argument registers after
   the core ones, and gets stricter alignment.  */
3393 static rtx
3394 mep_expand_builtin_saveregs (void)
3396 int bufsize, i, ns;
3397 rtx regbuf;
3399 ns = cfun->machine->arg_regs_to_save;
3400 if (TARGET_IVC2)
3402 bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
3403 regbuf = assign_stack_local (SImode, bufsize, 64);
3405 else
3407 bufsize = ns * 4;
3408 regbuf = assign_stack_local (SImode, bufsize, 32);
3411 move_block_from_reg (5-ns, regbuf, ns);
3413 if (TARGET_IVC2)
3415 rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
3416 int ofs = 8 * ((ns+1)/2);
3418 for (i=0; i<ns; i++)
/* Coprocessor argument registers start at hard reg 49+ offset.  */
3420 int rn = (4-ns) + i + 49;
3421 rtx ptr;
3423 ptr = offset_address (tmp, GEN_INT (ofs), 2);
3424 emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
3425 ofs += 8;
3428 return XEXP (regbuf, 0);
3431 #define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
/* TARGET_BUILD_BUILTIN_VA_LIST: va_list is a four-pointer record:
   next_gp (next core arg register slot), next_gp_limit (end of the
   register-save area), next_cop (next coprocessor slot, IVC2), and
   next_stack (next stack-passed argument).  */
3433 static tree
3434 mep_build_builtin_va_list (void)
3436 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3437 tree record;
3440 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3442 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3443 get_identifier ("__va_next_gp"), ptr_type_node);
3444 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3445 get_identifier ("__va_next_gp_limit"),
3446 ptr_type_node);
3447 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3448 ptr_type_node);
3449 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3450 ptr_type_node);
3452 DECL_FIELD_CONTEXT (f_next_gp) = record;
3453 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3454 DECL_FIELD_CONTEXT (f_next_cop) = record;
3455 DECL_FIELD_CONTEXT (f_next_stack) = record;
3457 TYPE_FIELDS (record) = f_next_gp;
3458 DECL_CHAIN (f_next_gp) = f_next_gp_limit;
3459 DECL_CHAIN (f_next_gp_limit) = f_next_cop;
3460 DECL_CHAIN (f_next_cop) = f_next_stack;
3462 layout_type (record);
3464 return record;
/* TARGET_EXPAND_BUILTIN_VA_START: initialize the four va_list fields.
   next_gp points at the saveregs buffer; next_gp_limit is 4*ns past
   it; next_cop follows the (rounded) register area; next_stack is
   NEXTARG.  */
3467 static void
3468 mep_expand_va_start (tree valist, rtx nextarg)
3470 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3471 tree next_gp, next_gp_limit, next_cop, next_stack;
3472 tree t, u;
3473 int ns;
3475 ns = cfun->machine->arg_regs_to_save;
3477 f_next_gp = TYPE_FIELDS (va_list_type_node);
3478 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3479 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3480 f_next_stack = DECL_CHAIN (f_next_cop);
3482 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3483 NULL_TREE);
3484 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3485 valist, f_next_gp_limit, NULL_TREE);
3486 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3487 NULL_TREE);
3488 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3489 valist, f_next_stack, NULL_TREE);
3491 /* va_list.next_gp = expand_builtin_saveregs (); */
3492 u = make_tree (sizetype, expand_builtin_saveregs ());
3493 u = fold_convert (ptr_type_node, u);
3494 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3495 TREE_SIDE_EFFECTS (t) = 1;
3496 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3498 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3499 u = fold_build_pointer_plus_hwi (u, 4 * ns);
3500 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3501 TREE_SIDE_EFFECTS (t) = 1;
3502 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3504 u = fold_build_pointer_plus_hwi (u, 8 * ((ns+1)/2));
3505 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3506 t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3507 TREE_SIDE_EFFECTS (t) = 1;
3508 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3510 /* va_list.next_stack = nextarg; */
3511 u = make_tree (ptr_type_node, nextarg);
3512 t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3513 TREE_SIDE_EFFECTS (t) = 1;
3514 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* TARGET_GIMPLIFY_VA_ARG_EXPR: emit GIMPLE that fetches the next
   vararg of TYPE.  Values larger than a register (8 bytes for IVC2
   vectors, else 4) are passed by reference.  The emitted code picks
   the coprocessor area for IVC2 vectors, the core register area
   otherwise, falling back to the stack area when the registers are
   exhausted; see the pseudo-code comment inside.  */
3517 static tree
3518 mep_gimplify_va_arg_expr (tree valist, tree type,
3519 gimple_seq *pre_p,
3520 gimple_seq *post_p ATTRIBUTE_UNUSED)
3522 HOST_WIDE_INT size, rsize;
3523 bool by_reference, ivc2_vec;
3524 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3525 tree next_gp, next_gp_limit, next_cop, next_stack;
3526 tree label_sover, label_selse;
3527 tree tmp, res_addr;
3529 ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);
3531 size = int_size_in_bytes (type);
3532 by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);
3534 if (by_reference)
3536 type = build_pointer_type (type);
3537 size = 4;
3539 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
3541 f_next_gp = TYPE_FIELDS (va_list_type_node);
3542 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3543 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3544 f_next_stack = DECL_CHAIN (f_next_cop);
3546 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3547 NULL_TREE);
3548 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3549 valist, f_next_gp_limit, NULL_TREE);
3550 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3551 NULL_TREE);
3552 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3553 valist, f_next_stack, NULL_TREE);
3555 /* if f_next_gp < f_next_gp_limit
3556 IF (VECTOR_P && IVC2)
3557 val = *f_next_cop;
3558 ELSE
3559 val = *f_next_gp;
3560 f_next_gp += 4;
3561 f_next_cop += 8;
3562 else
3563 label_selse:
3564 val = *f_next_stack;
3565 f_next_stack += rsize;
3566 label_sover:
3569 label_sover = create_artificial_label (UNKNOWN_LOCATION);
3570 label_selse = create_artificial_label (UNKNOWN_LOCATION);
3571 res_addr = create_tmp_var (ptr_type_node, NULL);
3573 tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
3574 unshare_expr (next_gp_limit));
3575 tmp = build3 (COND_EXPR, void_type_node, tmp,
3576 build1 (GOTO_EXPR, void_type_node,
3577 unshare_expr (label_selse)),
3578 NULL_TREE);
3579 gimplify_and_add (tmp, pre_p);
3581 if (ivc2_vec)
3583 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
3584 gimplify_and_add (tmp, pre_p);
3586 else
3588 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
3589 gimplify_and_add (tmp, pre_p);
/* Advance both cursors so core and coprocessor areas stay in step.  */
3592 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_gp), 4);
3593 gimplify_assign (unshare_expr (next_gp), tmp, pre_p);
3595 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_cop), 8);
3596 gimplify_assign (unshare_expr (next_cop), tmp, pre_p);
3598 tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
3599 gimplify_and_add (tmp, pre_p);
3601 /* - - */
3603 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
3604 gimplify_and_add (tmp, pre_p);
3606 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
3607 gimplify_and_add (tmp, pre_p);
3609 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_stack), rsize);
3610 gimplify_assign (unshare_expr (next_stack), tmp, pre_p);
3612 /* - - */
3614 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
3615 gimplify_and_add (tmp, pre_p);
3617 res_addr = fold_convert (build_pointer_type (type), res_addr);
/* By-reference arguments need one extra dereference: the slot holds a
   pointer to the value, not the value itself.  */
3619 if (by_reference)
3620 res_addr = build_va_arg_indirect_ref (res_addr);
3622 return build_va_arg_indirect_ref (res_addr);
3625 void
3626 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3627 rtx libname ATTRIBUTE_UNUSED,
3628 tree fndecl ATTRIBUTE_UNUSED)
3630 pcum->nregs = 0;
3632 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3633 pcum->vliw = 1;
3634 else
3635 pcum->vliw = 0;
3638 /* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3639 larger than 4 bytes are passed indirectly. Return value in 0,
3640 unless bigger than 4 bytes, then the caller passes a pointer as the
3641 first arg. For varargs, we copy $1..$4 to the stack. */
3643 static rtx
3644 mep_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
3645 const_tree type ATTRIBUTE_UNUSED,
3646 bool named ATTRIBUTE_UNUSED)
3648 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3650 /* VOIDmode is a signal for the backend to pass data to the call
3651 expander via the second operand to the call pattern. We use
3652 this to determine whether to use "jsr" or "jsrv". */
3653 if (mode == VOIDmode)
3654 return GEN_INT (cum->vliw);
3656 /* If we havn't run out of argument registers, return the next. */
3657 if (cum->nregs < 4)
3659 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3660 return gen_rtx_REG (mode, cum->nregs + 49);
3661 else
3662 return gen_rtx_REG (mode, cum->nregs + 1);
3665 /* Otherwise the argument goes on the stack. */
3666 return NULL_RTX;
3669 static bool
3670 mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
3671 enum machine_mode mode,
3672 const_tree type,
3673 bool named ATTRIBUTE_UNUSED)
3675 int size = bytesize (type, mode);
3677 /* This is non-obvious, but yes, large values passed after we've run
3678 out of registers are *still* passed by reference - we put the
3679 address of the parameter on the stack, as well as putting the
3680 parameter itself elsewhere on the stack. */
3682 if (size <= 0 || size > 8)
3683 return true;
3684 if (size <= 4)
3685 return false;
3686 if (TARGET_IVC2 && get_cumulative_args (cum)->nregs < 4
3687 && type != NULL_TREE && VECTOR_TYPE_P (type))
3688 return false;
3689 return true;
3692 static void
3693 mep_function_arg_advance (cumulative_args_t pcum,
3694 enum machine_mode mode ATTRIBUTE_UNUSED,
3695 const_tree type ATTRIBUTE_UNUSED,
3696 bool named ATTRIBUTE_UNUSED)
3698 get_cumulative_args (pcum)->nregs += 1;
3701 bool
3702 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3704 int size = bytesize (type, BLKmode);
3705 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3706 return size > 0 && size <= 8 ? 0 : 1;
3707 return size > 0 && size <= 4 ? 0 : 1;
/* Implement TARGET_NARROW_VOLATILE_BITFIELD: volatile bitfield
   accesses are always narrowed to the field's declared width.  The
   original body carried an unreachable "return false;" after the
   return statement; that dead code has been removed.  */

static bool
mep_narrow_volatile_bitfield (void)
{
  return true;
}
3717 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3720 mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
3722 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3723 return gen_rtx_REG (TYPE_MODE (type), 48);
3724 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3727 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3730 mep_libcall_value (enum machine_mode mode)
3732 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
/* Handle pipeline hazards.  */

/* The opcodes whose interactions we track, and their printable names
   (indexed by op_num, for hazard diagnostics).  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* Classification of the most recently emitted opcode.  */
static int prev_opcode = 0;

/* This isn't as optimal as it could be, because we don't know what
   control register the STC opcode is storing in.  We only need to add
   the nop if it's the relevant register, but we add it for irrelevant
   registers also.  */
3747 void
3748 mep_asm_output_opcode (FILE *file, const char *ptr)
3750 int this_opcode = op_none;
3751 const char *hazard = 0;
3753 switch (*ptr)
3755 case 'f':
3756 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3757 this_opcode = op_fsft;
3758 break;
3759 case 'r':
3760 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3761 this_opcode = op_ret;
3762 break;
3763 case 's':
3764 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3765 this_opcode = op_stc;
3766 break;
3769 if (prev_opcode == op_stc && this_opcode == op_fsft)
3770 hazard = "nop";
3771 if (prev_opcode == op_stc && this_opcode == op_ret)
3772 hazard = "nop";
3774 if (hazard)
3775 fprintf(file, "%s\t# %s-%s hazard\n\t",
3776 hazard, opnames[prev_opcode], opnames[this_opcode]);
3778 prev_opcode = this_opcode;
3781 /* Handle attributes. */
3783 static tree
3784 mep_validate_based_tiny (tree *node, tree name, tree args,
3785 int flags ATTRIBUTE_UNUSED, bool *no_add)
3787 if (TREE_CODE (*node) != VAR_DECL
3788 && TREE_CODE (*node) != POINTER_TYPE
3789 && TREE_CODE (*node) != TYPE_DECL)
3791 warning (0, "%qE attribute only applies to variables", name);
3792 *no_add = true;
3794 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3796 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3798 warning (0, "address region attributes not allowed with auto storage class");
3799 *no_add = true;
3801 /* Ignore storage attribute of pointed to variable: char __far * x; */
3802 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3804 warning (0, "address region attributes on pointed-to types ignored");
3805 *no_add = true;
3809 return NULL_TREE;
3812 static int
3813 mep_multiple_address_regions (tree list, bool check_section_attr)
3815 tree a;
3816 int count_sections = 0;
3817 int section_attr_count = 0;
3819 for (a = list; a; a = TREE_CHAIN (a))
3821 if (is_attribute_p ("based", TREE_PURPOSE (a))
3822 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3823 || is_attribute_p ("near", TREE_PURPOSE (a))
3824 || is_attribute_p ("far", TREE_PURPOSE (a))
3825 || is_attribute_p ("io", TREE_PURPOSE (a)))
3826 count_sections ++;
3827 if (check_section_attr)
3828 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3831 if (check_section_attr)
3832 return section_attr_count;
3833 else
3834 return count_sections;
/* Fetch the attribute list to consult for DECL's address-region
   attributes: a type's own attributes, else the decl's attributes if
   any, else the attributes of the decl's type.  The expansion is now
   fully parenthesized so the embedded conditional operator cannot
   rebind when the macro appears inside a larger expression.  */
#define MEP_ATTRIBUTES(decl)				\
  ((TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl)		\
   : DECL_ATTRIBUTES (decl)				\
     ? (DECL_ATTRIBUTES (decl))				\
     : TYPE_ATTRIBUTES (TREE_TYPE (decl)))
3843 static tree
3844 mep_validate_near_far (tree *node, tree name, tree args,
3845 int flags ATTRIBUTE_UNUSED, bool *no_add)
3847 if (TREE_CODE (*node) != VAR_DECL
3848 && TREE_CODE (*node) != FUNCTION_DECL
3849 && TREE_CODE (*node) != METHOD_TYPE
3850 && TREE_CODE (*node) != POINTER_TYPE
3851 && TREE_CODE (*node) != TYPE_DECL)
3853 warning (0, "%qE attribute only applies to variables and functions",
3854 name);
3855 *no_add = true;
3857 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3859 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3861 warning (0, "address region attributes not allowed with auto storage class");
3862 *no_add = true;
3864 /* Ignore storage attribute of pointed to variable: char __far * x; */
3865 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3867 warning (0, "address region attributes on pointed-to types ignored");
3868 *no_add = true;
3871 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
3873 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3874 name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
3875 DECL_ATTRIBUTES (*node) = NULL_TREE;
3877 return NULL_TREE;
3880 static tree
3881 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3882 int flags ATTRIBUTE_UNUSED, bool *no_add)
3884 if (TREE_CODE (*node) != FUNCTION_DECL
3885 && TREE_CODE (*node) != METHOD_TYPE)
3887 warning (0, "%qE attribute only applies to functions", name);
3888 *no_add = true;
3890 return NULL_TREE;
3893 static tree
3894 mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3895 int flags ATTRIBUTE_UNUSED, bool *no_add)
3897 tree function_type;
3899 if (TREE_CODE (*node) != FUNCTION_DECL)
3901 warning (0, "%qE attribute only applies to functions", name);
3902 *no_add = true;
3903 return NULL_TREE;
3906 if (DECL_DECLARED_INLINE_P (*node))
3907 error ("cannot inline interrupt function %qE", DECL_NAME (*node));
3908 DECL_UNINLINABLE (*node) = 1;
3910 function_type = TREE_TYPE (*node);
3912 if (TREE_TYPE (function_type) != void_type_node)
3913 error ("interrupt function must have return type of void");
3915 if (prototype_p (function_type)
3916 && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
3917 || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
3918 error ("interrupt function must have no arguments");
3920 return NULL_TREE;
3923 static tree
3924 mep_validate_io_cb (tree *node, tree name, tree args,
3925 int flags ATTRIBUTE_UNUSED, bool *no_add)
3927 if (TREE_CODE (*node) != VAR_DECL)
3929 warning (0, "%qE attribute only applies to variables", name);
3930 *no_add = true;
3933 if (args != NULL_TREE)
3935 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
3936 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
3937 if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
3939 warning (0, "%qE attribute allows only an integer constant argument",
3940 name);
3941 *no_add = true;
3945 if (*no_add == false && !TARGET_IO_NO_VOLATILE)
3946 TREE_THIS_VOLATILE (*node) = 1;
3948 return NULL_TREE;
3951 static tree
3952 mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3953 int flags ATTRIBUTE_UNUSED, bool *no_add)
3955 if (TREE_CODE (*node) != FUNCTION_TYPE
3956 && TREE_CODE (*node) != FUNCTION_DECL
3957 && TREE_CODE (*node) != METHOD_TYPE
3958 && TREE_CODE (*node) != FIELD_DECL
3959 && TREE_CODE (*node) != TYPE_DECL)
3961 static int gave_pointer_note = 0;
3962 static int gave_array_note = 0;
3963 static const char * given_type = NULL;
3965 given_type = tree_code_name[TREE_CODE (*node)];
3966 if (TREE_CODE (*node) == POINTER_TYPE)
3967 given_type = "pointers";
3968 if (TREE_CODE (*node) == ARRAY_TYPE)
3969 given_type = "arrays";
3971 if (given_type)
3972 warning (0, "%qE attribute only applies to functions, not %s",
3973 name, given_type);
3974 else
3975 warning (0, "%qE attribute only applies to functions",
3976 name);
3977 *no_add = true;
3979 if (TREE_CODE (*node) == POINTER_TYPE
3980 && !gave_pointer_note)
3982 inform (input_location,
3983 "to describe a pointer to a VLIW function, use syntax like this:\n%s",
3984 " typedef int (__vliw *vfuncptr) ();");
3985 gave_pointer_note = 1;
3988 if (TREE_CODE (*node) == ARRAY_TYPE
3989 && !gave_array_note)
3991 inform (input_location,
3992 "to describe an array of VLIW function pointers, use syntax like this:\n%s",
3993 " typedef int (__vliw *vfuncptr[]) ();");
3994 gave_array_note = 1;
3997 if (!TARGET_VLIW)
3998 error ("VLIW functions are not allowed without a VLIW configuration");
3999 return NULL_TREE;
4002 static const struct attribute_spec mep_attribute_table[11] =
4004 /* name min max decl type func handler
4005 affects_type_identity */
4006 { "based", 0, 0, false, false, false, mep_validate_based_tiny, false },
4007 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny, false },
4008 { "near", 0, 0, false, false, false, mep_validate_near_far, false },
4009 { "far", 0, 0, false, false, false, mep_validate_near_far, false },
4010 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt,
4011 false },
4012 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt, false },
4013 { "io", 0, 1, false, false, false, mep_validate_io_cb, false },
4014 { "cb", 0, 1, false, false, false, mep_validate_io_cb, false },
4015 { "vliw", 0, 0, false, true, false, mep_validate_vliw, false },
4016 { NULL, 0, 0, false, false, false, NULL, false }
4019 static bool
4020 mep_function_attribute_inlinable_p (const_tree callee)
4022 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4023 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4024 return (lookup_attribute ("disinterrupt", attrs) == 0
4025 && lookup_attribute ("interrupt", attrs) == 0);
4028 static bool
4029 mep_can_inline_p (tree caller, tree callee)
4031 if (TREE_CODE (callee) == ADDR_EXPR)
4032 callee = TREE_OPERAND (callee, 0);
4034 if (!mep_vliw_function_p (caller)
4035 && mep_vliw_function_p (callee))
4037 return false;
4039 return true;
4042 #define FUNC_CALL 1
4043 #define FUNC_DISINTERRUPT 2
4046 struct GTY(()) pragma_entry {
4047 int used;
4048 int flag;
4049 const char *funcname;
4051 typedef struct pragma_entry pragma_entry;
4053 /* Hash table of farcall-tagged sections. */
4054 static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4056 static int
4057 pragma_entry_eq (const void *p1, const void *p2)
4059 const pragma_entry *old = (const pragma_entry *) p1;
4060 const char *new_name = (const char *) p2;
4062 return strcmp (old->funcname, new_name) == 0;
4065 static hashval_t
4066 pragma_entry_hash (const void *p)
4068 const pragma_entry *old = (const pragma_entry *) p;
4069 return htab_hash_string (old->funcname);
4072 static void
4073 mep_note_pragma_flag (const char *funcname, int flag)
4075 pragma_entry **slot;
4077 if (!pragma_htab)
4078 pragma_htab = htab_create_ggc (31, pragma_entry_hash,
4079 pragma_entry_eq, NULL);
4081 slot = (pragma_entry **)
4082 htab_find_slot_with_hash (pragma_htab, funcname,
4083 htab_hash_string (funcname), INSERT);
4085 if (!*slot)
4087 *slot = ggc_alloc_pragma_entry ();
4088 (*slot)->flag = 0;
4089 (*slot)->used = 0;
4090 (*slot)->funcname = ggc_strdup (funcname);
4092 (*slot)->flag |= flag;
4095 static bool
4096 mep_lookup_pragma_flag (const char *funcname, int flag)
4098 pragma_entry **slot;
4100 if (!pragma_htab)
4101 return false;
4103 if (funcname[0] == '@' && funcname[2] == '.')
4104 funcname += 3;
4106 slot = (pragma_entry **)
4107 htab_find_slot_with_hash (pragma_htab, funcname,
4108 htab_hash_string (funcname), NO_INSERT);
4109 if (slot && *slot && ((*slot)->flag & flag))
4111 (*slot)->used |= flag;
4112 return true;
4114 return false;
4117 bool
4118 mep_lookup_pragma_call (const char *funcname)
4120 return mep_lookup_pragma_flag (funcname, FUNC_CALL);
4123 void
4124 mep_note_pragma_call (const char *funcname)
4126 mep_note_pragma_flag (funcname, FUNC_CALL);
4129 bool
4130 mep_lookup_pragma_disinterrupt (const char *funcname)
4132 return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
4135 void
4136 mep_note_pragma_disinterrupt (const char *funcname)
4138 mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
4141 static int
4142 note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
4144 const pragma_entry *d = (const pragma_entry *)(*slot);
4146 if ((d->flag & FUNC_DISINTERRUPT)
4147 && !(d->used & FUNC_DISINTERRUPT))
4148 warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
4149 return 1;
4152 void
4153 mep_file_cleanups (void)
4155 if (pragma_htab)
4156 htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
/* These three functions provide a bridge between the pragmas that
   affect register classes, and the functions that maintain them.  We
   can't call those functions directly as pragma handling is part of
   the front end and doesn't have direct access to them.  */
/* Bridge for pragma handling: snapshot the current register info.  */

void
mep_save_register_info (void)
{
  save_register_info ();
}
/* Bridge for pragma handling: rebuild register tables.  */

void
mep_reinit_regs (void)
{
  reinit_regs ();
}
/* Bridge for pragma handling: initialize register tables.  */

void
mep_init_regs (void)
{
  init_regs ();
}
4184 static int
4185 mep_attrlist_to_encoding (tree list, tree decl)
4187 if (mep_multiple_address_regions (list, false) > 1)
4189 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4190 TREE_PURPOSE (TREE_CHAIN (list)),
4191 DECL_NAME (decl),
4192 DECL_SOURCE_LINE (decl));
4193 TREE_CHAIN (list) = NULL_TREE;
4196 while (list)
4198 if (is_attribute_p ("based", TREE_PURPOSE (list)))
4199 return 'b';
4200 if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
4201 return 't';
4202 if (is_attribute_p ("near", TREE_PURPOSE (list)))
4203 return 'n';
4204 if (is_attribute_p ("far", TREE_PURPOSE (list)))
4205 return 'f';
4206 if (is_attribute_p ("io", TREE_PURPOSE (list)))
4208 if (TREE_VALUE (list)
4209 && TREE_VALUE (TREE_VALUE (list))
4210 && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
4212 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
4213 if (location >= 0
4214 && location <= 0x1000000)
4215 return 'i';
4217 return 'I';
4219 if (is_attribute_p ("cb", TREE_PURPOSE (list)))
4220 return 'c';
4221 list = TREE_CHAIN (list);
4223 if (TARGET_TF
4224 && TREE_CODE (decl) == FUNCTION_DECL
4225 && DECL_SECTION_NAME (decl) == 0)
4226 return 'f';
4227 return 0;
4230 static int
4231 mep_comp_type_attributes (const_tree t1, const_tree t2)
4233 int vliw1, vliw2;
4235 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4236 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4238 if (vliw1 != vliw2)
4239 return 0;
4241 return 1;
4244 static void
4245 mep_insert_attributes (tree decl, tree *attributes)
4247 int size;
4248 const char *secname = 0;
4249 tree attrib, attrlist;
4250 char encoding;
4252 if (TREE_CODE (decl) == FUNCTION_DECL)
4254 const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));
4256 if (mep_lookup_pragma_disinterrupt (funcname))
4258 attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
4259 *attributes = chainon (*attributes, attrib);
4263 if (TREE_CODE (decl) != VAR_DECL
4264 || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
4265 return;
4267 if (TREE_READONLY (decl) && TARGET_DC)
4268 /* -mdc means that const variables default to the near section,
4269 regardless of the size cutoff. */
4270 return;
4272 /* User specified an attribute, so override the default.
4273 Ignore storage attribute of pointed to variable. char __far * x; */
4274 if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
4276 if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
4277 TYPE_ATTRIBUTES (decl) = NULL_TREE;
4278 else if (DECL_ATTRIBUTES (decl) && *attributes)
4279 DECL_ATTRIBUTES (decl) = NULL_TREE;
4282 attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
4283 encoding = mep_attrlist_to_encoding (attrlist, decl);
4284 if (!encoding && TYPE_P (TREE_TYPE (decl)))
4286 attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
4287 encoding = mep_attrlist_to_encoding (attrlist, decl);
4289 if (encoding)
4291 /* This means that the declaration has a specific section
4292 attribute, so we should not apply the default rules. */
4294 if (encoding == 'i' || encoding == 'I')
4296 tree attr = lookup_attribute ("io", attrlist);
4297 if (attr
4298 && TREE_VALUE (attr)
4299 && TREE_VALUE (TREE_VALUE(attr)))
4301 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4302 static tree previous_value = 0;
4303 static int previous_location = 0;
4304 static tree previous_name = 0;
4306 /* We take advantage of the fact that gcc will reuse the
4307 same tree pointer when applying an attribute to a
4308 list of decls, but produce a new tree for attributes
4309 on separate source lines, even when they're textually
4310 identical. This is the behavior we want. */
4311 if (TREE_VALUE (attr) == previous_value
4312 && location == previous_location)
4314 warning(0, "__io address 0x%x is the same for %qE and %qE",
4315 location, previous_name, DECL_NAME (decl));
4317 previous_name = DECL_NAME (decl);
4318 previous_location = location;
4319 previous_value = TREE_VALUE (attr);
4322 return;
4326 /* Declarations of arrays can change size. Don't trust them. */
4327 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
4328 size = 0;
4329 else
4330 size = int_size_in_bytes (TREE_TYPE (decl));
4332 if (TARGET_RAND_TPGP && size <= 4 && size > 0)
4334 if (TREE_PUBLIC (decl)
4335 || DECL_EXTERNAL (decl)
4336 || TREE_STATIC (decl))
4338 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
4339 int key = 0;
4341 while (*name)
4342 key += *name++;
4344 switch (key & 3)
4346 case 0:
4347 secname = "based";
4348 break;
4349 case 1:
4350 secname = "tiny";
4351 break;
4352 case 2:
4353 secname = "far";
4354 break;
4355 default:
4360 else
4362 if (size <= mep_based_cutoff && size > 0)
4363 secname = "based";
4364 else if (size <= mep_tiny_cutoff && size > 0)
4365 secname = "tiny";
4366 else if (TARGET_L)
4367 secname = "far";
4370 if (mep_const_section && TREE_READONLY (decl))
4372 if (strcmp (mep_const_section, "tiny") == 0)
4373 secname = "tiny";
4374 else if (strcmp (mep_const_section, "near") == 0)
4375 return;
4376 else if (strcmp (mep_const_section, "far") == 0)
4377 secname = "far";
4380 if (!secname)
4381 return;
4383 if (!mep_multiple_address_regions (*attributes, true)
4384 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
4386 attrib = build_tree_list (get_identifier (secname), NULL_TREE);
4388 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4389 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4390 and mep_validate_based_tiny. */
4391 DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
4395 static void
4396 mep_encode_section_info (tree decl, rtx rtl, int first)
4398 rtx rtlname;
4399 const char *oldname;
4400 const char *secname;
4401 char encoding;
4402 char *newname;
4403 tree idp;
4404 int maxsize;
4405 tree type;
4406 tree mep_attributes;
4408 if (! first)
4409 return;
4411 if (TREE_CODE (decl) != VAR_DECL
4412 && TREE_CODE (decl) != FUNCTION_DECL)
4413 return;
4415 rtlname = XEXP (rtl, 0);
4416 if (GET_CODE (rtlname) == SYMBOL_REF)
4417 oldname = XSTR (rtlname, 0);
4418 else if (GET_CODE (rtlname) == MEM
4419 && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
4420 oldname = XSTR (XEXP (rtlname, 0), 0);
4421 else
4422 gcc_unreachable ();
4424 type = TREE_TYPE (decl);
4425 if (type == error_mark_node)
4426 return;
4427 mep_attributes = MEP_ATTRIBUTES (decl);
4429 encoding = mep_attrlist_to_encoding (mep_attributes, decl);
4431 if (encoding)
4433 newname = (char *) alloca (strlen (oldname) + 4);
4434 sprintf (newname, "@%c.%s", encoding, oldname);
4435 idp = get_identifier (newname);
4436 XEXP (rtl, 0) =
4437 gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
4438 SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
4439 SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);
4441 switch (encoding)
4443 case 'b':
4444 maxsize = 128;
4445 secname = "based";
4446 break;
4447 case 't':
4448 maxsize = 65536;
4449 secname = "tiny";
4450 break;
4451 case 'n':
4452 maxsize = 0x1000000;
4453 secname = "near";
4454 break;
4455 default:
4456 maxsize = 0;
4457 secname = 0;
4458 break;
4460 if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
4462 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4463 oldname,
4464 (long) int_size_in_bytes (TREE_TYPE (decl)),
4465 secname,
4466 maxsize);
/* Implement TARGET_STRIP_NAME_ENCODING.  Skip any leading
   assembler-name '*' markers and any number of "@X." section-encoding
   prefixes, returning a pointer into the original string.  The sym[1]
   check prevents reading past the NUL terminator of a string that
   ends in a bare '@' (the old code indexed sym[2] unconditionally).  */

const char *
mep_strip_name_encoding (const char *sym)
{
  while (1)
    {
      if (*sym == '*')
	sym++;
      else if (*sym == '@' && sym[1] != '\0' && sym[2] == '.')
	sym += 3;
      else
	return sym;
    }
}
4485 static section *
4486 mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
4487 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
4489 int readonly = 1;
4490 int encoding;
4492 switch (TREE_CODE (decl))
4494 case VAR_DECL:
4495 if (!TREE_READONLY (decl)
4496 || TREE_SIDE_EFFECTS (decl)
4497 || !DECL_INITIAL (decl)
4498 || (DECL_INITIAL (decl) != error_mark_node
4499 && !TREE_CONSTANT (DECL_INITIAL (decl))))
4500 readonly = 0;
4501 break;
4502 case CONSTRUCTOR:
4503 if (! TREE_CONSTANT (decl))
4504 readonly = 0;
4505 break;
4507 default:
4508 break;
4511 if (TREE_CODE (decl) == FUNCTION_DECL)
4513 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4515 if (name[0] == '@' && name[2] == '.')
4516 encoding = name[1];
4517 else
4518 encoding = 0;
4520 if (flag_function_sections || DECL_ONE_ONLY (decl))
4521 mep_unique_section (decl, 0);
4522 else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4524 if (encoding == 'f')
4525 return vftext_section;
4526 else
4527 return vtext_section;
4529 else if (encoding == 'f')
4530 return ftext_section;
4531 else
4532 return text_section;
4535 if (TREE_CODE (decl) == VAR_DECL)
4537 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4539 if (name[0] == '@' && name[2] == '.')
4540 switch (name[1])
4542 case 'b':
4543 return based_section;
4545 case 't':
4546 if (readonly)
4547 return srodata_section;
4548 if (DECL_INITIAL (decl))
4549 return sdata_section;
4550 return tinybss_section;
4552 case 'f':
4553 if (readonly)
4554 return frodata_section;
4555 return far_section;
4557 case 'i':
4558 case 'I':
4559 error_at (DECL_SOURCE_LOCATION (decl),
4560 "variable %D of type %<io%> must be uninitialized", decl);
4561 return data_section;
4563 case 'c':
4564 error_at (DECL_SOURCE_LOCATION (decl),
4565 "variable %D of type %<cb%> must be uninitialized", decl);
4566 return data_section;
4570 if (readonly)
4571 return readonly_data_section;
4573 return data_section;
4576 static void
4577 mep_unique_section (tree decl, int reloc)
4579 static const char *prefixes[][2] =
4581 { ".text.", ".gnu.linkonce.t." },
4582 { ".rodata.", ".gnu.linkonce.r." },
4583 { ".data.", ".gnu.linkonce.d." },
4584 { ".based.", ".gnu.linkonce.based." },
4585 { ".sdata.", ".gnu.linkonce.s." },
4586 { ".far.", ".gnu.linkonce.far." },
4587 { ".ftext.", ".gnu.linkonce.ft." },
4588 { ".frodata.", ".gnu.linkonce.frd." },
4589 { ".srodata.", ".gnu.linkonce.srd." },
4590 { ".vtext.", ".gnu.linkonce.v." },
4591 { ".vftext.", ".gnu.linkonce.vf." }
4593 int sec = 2; /* .data */
4594 int len;
4595 const char *name, *prefix;
4596 char *string;
4598 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
4599 if (DECL_RTL (decl))
4600 name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4602 if (TREE_CODE (decl) == FUNCTION_DECL)
4604 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4605 sec = 9; /* .vtext */
4606 else
4607 sec = 0; /* .text */
4609 else if (decl_readonly_section (decl, reloc))
4610 sec = 1; /* .rodata */
4612 if (name[0] == '@' && name[2] == '.')
4614 switch (name[1])
4616 case 'b':
4617 sec = 3; /* .based */
4618 break;
4619 case 't':
4620 if (sec == 1)
4621 sec = 8; /* .srodata */
4622 else
4623 sec = 4; /* .sdata */
4624 break;
4625 case 'f':
4626 if (sec == 0)
4627 sec = 6; /* .ftext */
4628 else if (sec == 9)
4629 sec = 10; /* .vftext */
4630 else if (sec == 1)
4631 sec = 7; /* .frodata */
4632 else
4633 sec = 5; /* .far. */
4634 break;
4636 name += 3;
4639 prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
4640 len = strlen (name) + strlen (prefix);
4641 string = (char *) alloca (len + 1);
4643 sprintf (string, "%s%s", prefix, name);
4645 DECL_SECTION_NAME (decl) = build_string (len, string);
4648 /* Given a decl, a section name, and whether the decl initializer
4649 has relocs, choose attributes for the section. */
4651 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4653 static unsigned int
4654 mep_section_type_flags (tree decl, const char *name, int reloc)
4656 unsigned int flags = default_section_type_flags (decl, name, reloc);
4658 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4659 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4660 flags |= SECTION_MEP_VLIW;
4662 return flags;
4665 /* Switch to an arbitrary section NAME with attributes as specified
4666 by FLAGS. ALIGN specifies any known alignment requirements for
4667 the section; 0 if the default should be used.
4669 Differs from the standard ELF version only in support of VLIW mode. */
4671 static void
4672 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4674 char flagchars[8], *f = flagchars;
4675 const char *type;
4677 if (!(flags & SECTION_DEBUG))
4678 *f++ = 'a';
4679 if (flags & SECTION_WRITE)
4680 *f++ = 'w';
4681 if (flags & SECTION_CODE)
4682 *f++ = 'x';
4683 if (flags & SECTION_SMALL)
4684 *f++ = 's';
4685 if (flags & SECTION_MEP_VLIW)
4686 *f++ = 'v';
4687 *f = '\0';
4689 if (flags & SECTION_BSS)
4690 type = "nobits";
4691 else
4692 type = "progbits";
4694 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4695 name, flagchars, type);
4697 if (flags & SECTION_CODE)
4698 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4699 asm_out_file);
4702 void
4703 mep_output_aligned_common (FILE *stream, tree decl, const char *name,
4704 int size, int align, int global)
4706 /* We intentionally don't use mep_section_tag() here. */
4707 if (name[0] == '@'
4708 && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
4709 && name[2] == '.')
4711 int location = -1;
4712 tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
4713 DECL_ATTRIBUTES (decl));
4714 if (attr
4715 && TREE_VALUE (attr)
4716 && TREE_VALUE (TREE_VALUE(attr)))
4717 location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4718 if (location == -1)
4719 return;
4720 if (global)
4722 fprintf (stream, "\t.globl\t");
4723 assemble_name (stream, name);
4724 fprintf (stream, "\n");
4726 assemble_name (stream, name);
4727 fprintf (stream, " = %d\n", location);
4728 return;
4730 if (name[0] == '@' && name[2] == '.')
4732 const char *sec = 0;
4733 switch (name[1])
4735 case 'b':
4736 switch_to_section (based_section);
4737 sec = ".based";
4738 break;
4739 case 't':
4740 switch_to_section (tinybss_section);
4741 sec = ".sbss";
4742 break;
4743 case 'f':
4744 switch_to_section (farbss_section);
4745 sec = ".farbss";
4746 break;
4748 if (sec)
4750 const char *name2;
4751 int p2align = 0;
4753 while (align > BITS_PER_UNIT)
4755 align /= 2;
4756 p2align ++;
4758 name2 = targetm.strip_name_encoding (name);
4759 if (global)
4760 fprintf (stream, "\t.globl\t%s\n", name2);
4761 fprintf (stream, "\t.p2align %d\n", p2align);
4762 fprintf (stream, "\t.type\t%s,@object\n", name2);
4763 fprintf (stream, "\t.size\t%s,%d\n", name2, size);
4764 fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
4765 return;
4769 if (!global)
4771 fprintf (stream, "\t.local\t");
4772 assemble_name (stream, name);
4773 fprintf (stream, "\n");
4775 fprintf (stream, "\t.comm\t");
4776 assemble_name (stream, name);
4777 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
4780 /* Trampolines. */
4782 static void
4783 mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
4785 rtx addr = XEXP (m_tramp, 0);
4786 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
4788 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4789 LCT_NORMAL, VOIDmode, 3,
4790 addr, Pmode,
4791 fnaddr, Pmode,
4792 static_chain, Pmode);
4795 /* Experimental Reorg. */
/* Return true if IN mentions register REG, or, when REG is NULL,
   whether IN contains any MEM.  If MODES_TOO is nonzero, a REG only
   matches when its machine mode equals REG's mode as well.  The walk
   is fully recursive except that for a SET with REG == NULL only the
   destination is scanned (a SET's source is read-only, so it cannot
   be a *written* memory location).  */

static bool
mep_mentioned_p (rtx in,
		 rtx reg, /* NULL for mem */
		 int modes_too) /* if nonzero, modes must match also. */
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return false;
  /* Be defensive: a non-REG second argument never matches anything.  */
  if (reg && GET_CODE (reg) != REG)
    return false;

  /* A label reference contains no registers; it only counts as a
     "mention" in the REG == NULL (memory-search) mode.  */
  if (GET_CODE (in) == LABEL_REF)
    return (reg == 0);

  code = GET_CODE (in);

  switch (code)
    {
    case MEM:
      /* When looking for a register, recurse into the address;
	 when looking for memory, this is a hit.  */
      if (reg)
	return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
      return true;

    case REG:
      if (!reg)
	return false;
      if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
	return false;
      return (REGNO (in) == REGNO (reg));

    /* Leaf codes that can contain neither a REG nor a MEM.  */
    case SCRATCH:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
      return false;

    default:
      break;
    }

  /* Set's source should be read-only.  */
  if (code == SET && !reg)
    return mep_mentioned_p (SET_DEST (in), reg, modes_too);

  /* Generic operand walk for every other rtx code.  */
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	    if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
	      return true;
	}
      else if (fmt[i] == 'e'
	       && mep_mentioned_p (XEXP (in, i), reg, modes_too))
	return true;
    }
  return false;
}
4863 #define EXPERIMENTAL_REGMOVE_REORG 1
4865 #if EXPERIMENTAL_REGMOVE_REORG
/* Return 1 if hard registers R1 and R2 belong to the same replaceable
   bank — both core (general) registers or both coprocessor registers —
   and 0 otherwise.  */

static int
mep_compatible_reg_class (int r1, int r2)
{
  int both_core = GR_REGNO_P (r1) && GR_REGNO_P (r2);
  int both_copro = CR_REGNO_P (r1) && CR_REGNO_P (r2);

  return (both_core || both_copro) ? 1 : 0;
}
4877 static void
4878 mep_reorg_regmove (rtx insns)
4880 rtx insn, next, pat, follow, *where;
4881 int count = 0, done = 0, replace, before = 0;
4883 if (dump_file)
4884 for (insn = insns; insn; insn = NEXT_INSN (insn))
4885 if (GET_CODE (insn) == INSN)
4886 before++;
4888 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4889 set that uses the r2 and r2 dies there. We replace r2 with r1
4890 and see if it's still a valid insn. If so, delete the first set.
4891 Copied from reorg.c. */
4893 while (!done)
4895 done = 1;
4896 for (insn = insns; insn; insn = next)
4898 next = next_nonnote_nondebug_insn (insn);
4899 if (GET_CODE (insn) != INSN)
4900 continue;
4901 pat = PATTERN (insn);
4903 replace = 0;
4905 if (GET_CODE (pat) == SET
4906 && GET_CODE (SET_SRC (pat)) == REG
4907 && GET_CODE (SET_DEST (pat)) == REG
4908 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4909 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4911 follow = next_nonnote_nondebug_insn (insn);
4912 if (dump_file)
4913 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
4915 while (follow && GET_CODE (follow) == INSN
4916 && GET_CODE (PATTERN (follow)) == SET
4917 && !dead_or_set_p (follow, SET_SRC (pat))
4918 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
4919 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
4921 if (dump_file)
4922 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
4923 follow = next_nonnote_insn (follow);
4926 if (dump_file)
4927 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
4928 if (follow && GET_CODE (follow) == INSN
4929 && GET_CODE (PATTERN (follow)) == SET
4930 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
4932 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
4934 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
4936 replace = 1;
4937 where = & SET_SRC (PATTERN (follow));
4940 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
4942 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
4944 replace = 1;
4945 where = & PATTERN (follow);
4951 /* If so, follow is the corresponding insn */
4952 if (replace)
4954 if (dump_file)
4956 rtx x;
4958 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
4959 for (x = insn; x ;x = NEXT_INSN (x))
4961 print_rtl_single (dump_file, x);
4962 if (x == follow)
4963 break;
4964 fprintf (dump_file, "\n");
4968 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
4969 follow, where))
4971 count ++;
4972 delete_insn (insn);
4973 if (dump_file)
4975 fprintf (dump_file, "\n----- Success! new insn:\n\n");
4976 print_rtl_single (dump_file, follow);
4978 done = 0;
4984 if (dump_file)
4986 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
4987 fprintf (dump_file, "=====\n");
4990 #endif
4993 /* Figure out where to put LABEL, which is the label for a repeat loop.
4994 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
4995 the loop ends just before LAST_INSN. If SHARED, insns other than the
4996 "repeat" might use LABEL to jump to the loop's continuation point.
4998 Return the last instruction in the adjusted loop. */
static rtx
mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
			      bool shared)
{
  rtx next, prev;
  int count = 0, code, icode;

  if (dump_file)
    fprintf (dump_file, "considering end of repeat loop at insn %d\n",
	     INSN_UID (last_insn));

  /* Set PREV to the last insn in the loop.  */
  prev = last_insn;
  if (!including)
    prev = PREV_INSN (prev);

  /* Set NEXT to the next insn after the repeat label.  When the label
     is private (!SHARED), try to pull up to two trailing insns of the
     loop above the label so they become the repeat "epilogue".  */
  next = last_insn;
  if (!shared)
    while (prev != 0)
      {
	code = GET_CODE (prev);
	/* Never move calls, labels, or barriers into the epilogue.  */
	if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
	  break;

	if (INSN_P (prev))
	  {
	    /* For a bundled SEQUENCE, inspect the second element.  */
	    if (GET_CODE (PATTERN (prev)) == SEQUENCE)
	      prev = XVECEXP (PATTERN (prev), 0, 1);

	    /* Other insns that should not be in the last two opcodes.  */
	    icode = recog_memoized (prev);
	    if (icode < 0
		|| icode == CODE_FOR_repeat
		|| icode == CODE_FOR_erepeat
		|| get_attr_may_trap (prev) == MAY_TRAP_YES)
	      break;

	    /* That leaves JUMP_INSN and INSN.  It will have BImode if it
	       is the second instruction in a VLIW bundle.  In that case,
	       loop again: if the first instruction also satisfies the
	       conditions above then we will reach here again and put
	       both of them into the repeat epilogue.  Otherwise both
	       should remain outside.  */
	    if (GET_MODE (prev) != BImode)
	      {
		count++;
		next = prev;
		if (dump_file)
		  print_rtl_single (dump_file, next);
		if (count == 2)
		  break;
	      }
	  }
	prev = PREV_INSN (prev);
      }

  /* See if we're adding the label immediately after the repeat insn.
     If so, we need to separate them with a nop.  */
  prev = prev_real_insn (next);
  if (prev)
    switch (recog_memoized (prev))
      {
      case CODE_FOR_repeat:
      case CODE_FOR_erepeat:
	if (dump_file)
	  fprintf (dump_file, "Adding nop inside loop\n");
	emit_insn_before (gen_nop (), next);
	break;

      default:
	break;
      }

  /* Insert the label.  */
  emit_label_before (label, next);

  /* Insert the nops.  Pad the epilogue out to two slots with nops if
     fewer than two real insns were moved above the label.  */
  if (dump_file && count < 2)
    fprintf (dump_file, "Adding %d nop%s\n\n",
	     2 - count, count == 1 ? "" : "s");
  for (; count < 2; count++)
    if (including)
      last_insn = emit_insn_after (gen_nop (), last_insn);
    else
      emit_insn_before (gen_nop (), last_insn);

  return last_insn;
}
/* Emit a doloop_begin (IS_END == 0) or doloop_end (IS_END != 0)
   placeholder for OPERANDS.  Each begin/end pair shares an integer
   tag so mep_reorg_repeat can match them up later; a fresh tag is
   allocated whenever the expected begin/end alternation breaks.  */

void
mep_emit_doloop (rtx *operands, int is_end)
{
  rtx tag;

  /* Allocate a new tag if this is the first doloop insn, or if we get
     two begins (or two ends) in a row — alternation was broken.  */
  if (cfun->machine->doloop_tags == 0
      || cfun->machine->doloop_tag_from_end == is_end)
    {
      cfun->machine->doloop_tags++;
      cfun->machine->doloop_tag_from_end = is_end;
    }

  tag = GEN_INT (cfun->machine->doloop_tags - 1);
  if (is_end)
    emit_jump_insn (gen_doloop_end_internal (operands[0], operands[4], tag));
  else
    emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
}
5112 /* Code for converting doloop_begins and doloop_ends into valid
5113 MeP instructions. A doloop_begin is just a placeholder:
5115 $count = unspec ($count)
5117 where $count is initially the number of iterations - 1.
5118 doloop_end has the form:
5120 if ($count-- == 0) goto label
5122 The counter variable is private to the doloop insns, nothing else
5123 relies on its value.
5125 There are three cases, in decreasing order of preference:
5127 1. A loop has exactly one doloop_begin and one doloop_end.
5128 The doloop_end branches to the first instruction after
5129 the doloop_begin.
5131 In this case we can replace the doloop_begin with a repeat
5132 instruction and remove the doloop_end. I.e.:
5134 $count1 = unspec ($count1)
5135 label:
5137 insn1
5138 insn2
5139 if ($count2-- == 0) goto label
5141 becomes:
5143 repeat $count1,repeat_label
5144 label:
5146 repeat_label:
5147 insn1
5148 insn2
5149 # end repeat
5151 2. As for (1), except there are several doloop_ends. One of them
5152 (call it X) falls through to a label L. All the others fall
5153 through to branches to L.
5155 In this case, we remove X and replace the other doloop_ends
5156 with branches to the repeat label. For example:
5158 $count1 = unspec ($count1)
5159 start:
5161 if ($count2-- == 0) goto label
5162 end:
5164 if ($count3-- == 0) goto label
5165 goto end
5167 becomes:
5169 repeat $count1,repeat_label
5170 start:
5172 repeat_label:
5175 # end repeat
5176 end:
5178 goto repeat_label
5180 3. The fallback case. Replace doloop_begins with:
5182 $count = $count + 1
5184 Replace doloop_ends with the equivalent of:
5186 $count = $count - 1
5187 if ($count == 0) goto label
5189 Note that this might need a scratch register if $count
5190 is stored in memory. */
/* A structure describing one doloop_begin.  Instances are
   alloca-allocated and chained newest-first by mep_reorg_repeat.  */
struct mep_doloop_begin {
  /* The next doloop_begin with the same tag.  */
  struct mep_doloop_begin *next;

  /* The instruction itself.  */
  rtx insn;

  /* The initial counter value.  This is known to be a general register.  */
  rtx counter;
};
/* A structure describing a doloop_end.  Instances are alloca-allocated
   and chained by mep_reorg_repeat, which keeps the most promising one
   (case (2)'s X) at the head of the list.  */
struct mep_doloop_end {
  /* The next doloop_end with the same loop tag.  */
  struct mep_doloop_end *next;

  /* The instruction itself.  */
  rtx insn;

  /* The first instruction after INSN when the branch isn't taken.  */
  rtx fallthrough;

  /* The location of the counter value.  Since doloop_end_internal is a
     jump instruction, it has to allow the counter to be stored anywhere
     (any non-fixed register or memory location).  */
  rtx counter;

  /* The target label (the place where the insn branches when the counter
     isn't zero).  */
  rtx label;

  /* A scratch register.  Only available when COUNTER isn't stored
     in a general register.  */
  rtx scratch;
};
/* One do-while loop, identified by its tag (index into the array built
   in mep_reorg_repeat).  */
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
5241 /* Return true if LOOP can be converted into repeat/repeat_end form
5242 (that is, if it matches cases (1) or (2) above). */
/* Return true if LOOP can be converted into repeat/repeat_end form
   (that is, if it matches cases (1) or (2) above).  */

static bool
mep_repeat_loop_p (struct mep_doloop *loop)
{
  struct mep_doloop_end *end;
  rtx fallthrough;

  /* There must be exactly one doloop_begin and at least one doloop_end.  */
  if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
    return false;

  /* The first doloop_end (X) must branch back to the insn after
     the doloop_begin.  */
  if (prev_real_insn (loop->end->label) != loop->begin->insn)
    return false;

  /* All the other doloop_ends must branch to the same place as X.
     When the branch isn't taken, they must jump to the instruction
     after X.  */
  fallthrough = loop->end->fallthrough;
  for (end = loop->end->next; end != 0; end = end->next)
    if (end->label != loop->end->label
	|| !simplejump_p (end->fallthrough)
	|| next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
      return false;

  return true;
}
5273 /* The main repeat reorg function. See comment above for details. */
/* The main repeat reorg function.  See comment above for details.
   Pass 1 groups every doloop_begin/doloop_end by tag; pass 2 converts
   each loop either to hardware repeat form (cases (1)/(2)) or to an
   explicit decrement-and-branch sequence (case (3)).  */

static void
mep_reorg_repeat (rtx insns)
{
  rtx insn;
  struct mep_doloop *loops, *loop;
  struct mep_doloop_begin *begin;
  struct mep_doloop_end *end;

  /* Quick exit if we haven't created any loops.  */
  if (cfun->machine->doloop_tags == 0)
    return;

  /* Create an array of mep_doloop structures.  */
  loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
  memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

  /* Search the function for do-while insns and group them by loop tag.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      switch (recog_memoized (insn))
	{
	case CODE_FOR_doloop_begin_internal:
	  insn_extract (insn);
	  /* Operand 2 is the tag assigned by mep_emit_doloop.  */
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
	  begin->next = loop->begin;
	  begin->insn = insn;
	  begin->counter = recog_data.operand[0];

	  loop->begin = begin;
	  break;

	case CODE_FOR_doloop_end_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
	  end->insn = insn;
	  end->fallthrough = next_real_insn (insn);
	  end->counter = recog_data.operand[0];
	  end->label = recog_data.operand[1];
	  end->scratch = recog_data.operand[3];

	  /* If this insn falls through to an unconditional jump,
	     give it a lower priority than the others.  */
	  if (loop->end != 0 && simplejump_p (end->fallthrough))
	    {
	      end->next = loop->end->next;
	      loop->end->next = end;
	    }
	  else
	    {
	      end->next = loop->end;
	      loop->end = end;
	    }
	  break;
	}

  /* Convert the insns for each loop in turn.  */
  for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
    if (mep_repeat_loop_p (loop))
      {
	/* Case (1) or (2).  */
	rtx repeat_label, label_ref;

	/* Create a new label for the repeat insn.  */
	repeat_label = gen_label_rtx ();

	/* Replace the doloop_begin with a repeat.  */
	label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
	emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
			  loop->begin->insn);
	delete_insn (loop->begin->insn);

	/* Insert the repeat label before the first doloop_end.
	   Fill the gap with nops if there are other doloop_ends.  */
	mep_insert_repeat_label_last (loop->end->insn, repeat_label,
				      false, loop->end->next != 0);

	/* Emit a repeat_end (to improve the readability of the output).  */
	emit_insn_before (gen_repeat_end (), loop->end->insn);

	/* Delete the first doloop_end.  */
	delete_insn (loop->end->insn);

	/* Replace the others with branches to REPEAT_LABEL.  */
	for (end = loop->end->next; end != 0; end = end->next)
	  {
	    emit_jump_insn_before (gen_jump (repeat_label), end->insn);
	    delete_insn (end->insn);
	    delete_insn (end->fallthrough);
	  }
      }
    else
      {
	/* Case (3).  First replace all the doloop_begins with increment
	   instructions.  */
	for (begin = loop->begin; begin != 0; begin = begin->next)
	  {
	    emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
					     begin->counter, const1_rtx),
			      begin->insn);
	    delete_insn (begin->insn);
	  }

	/* Replace all the doloop_ends with decrement-and-branch sequences.  */
	for (end = loop->end; end != 0; end = end->next)
	  {
	    rtx reg;

	    start_sequence ();

	    /* Load the counter value into a general register.
	       REGNO > 15: presumably registers above $15 are not usable
	       by the add/bne patterns below — confirm against mep.md.  */
	    reg = end->counter;
	    if (!REG_P (reg) || REGNO (reg) > 15)
	      {
		reg = end->scratch;
		emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
	      }

	    /* Decrement the counter.  */
	    emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
				      constm1_rtx));

	    /* Copy it back to its original location.  */
	    if (reg != end->counter)
	      emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

	    /* Jump back to the start label.  */
	    insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
						     end->label));
	    JUMP_LABEL (insn) = end->label;
	    LABEL_NUSES (end->label)++;

	    /* Emit the whole sequence before the doloop_end.  */
	    insn = get_insns ();
	    end_sequence ();
	    emit_insn_before (insn, end->insn);

	    /* Delete the doloop_end.  */
	    delete_insn (end->insn);
	  }
      }
}
/* Return true if INSN is a conditional branch whose condition
   (EQ/NE/LT/GE) can be inverted and still be recognized.  The test is
   destructive-then-restoring: the condition code is flipped in place,
   the insn re-recognized, and the original code put back before
   returning.  INSN_CODE is invalidated both times so recog_memoized
   does a fresh recognition.  */

static bool
mep_invertable_branch_p (rtx insn)
{
  rtx cond, set;
  enum rtx_code old_code;
  int i;

  set = PATTERN (insn);
  if (GET_CODE (set) != SET)
    return false;
  if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
    return false;
  cond = XEXP (XEXP (set, 1), 0);
  old_code = GET_CODE (cond);
  /* Flip the comparison to its logical inverse; anything other than
     these four codes is not invertible here.  */
  switch (old_code)
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      return false;
    }
  /* Try to recognize the inverted form, then restore the original.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  PUT_CODE (cond, old_code);
  INSN_CODE (insn) = -1;
  return i >= 0;
}
/* Invert conditional branch INSN in place and retarget it to a new
   label emitted after AFTER.  The caller must have checked
   mep_invertable_branch_p first; anything else is a bug
   (gcc_unreachable / gcc_assert below).  */

static void
mep_invert_branch (rtx insn, rtx after)
{
  rtx cond, set, label;
  int i;

  set = PATTERN (insn);

  gcc_assert (GET_CODE (set) == SET);
  gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

  /* Flip the comparison code, mirroring mep_invertable_branch_p.  */
  cond = XEXP (XEXP (set, 1), 0);
  switch (GET_CODE (cond))
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      gcc_unreachable ();
    }
  /* Emit the new target label and point the branch's LABEL_REF arm
     (either the "then" or "else" arm of the IF_THEN_ELSE) at it,
     dropping the old label if this branch was its only user.  */
  label = gen_label_rtx ();
  emit_label_after (label, after);
  for (i=1; i<=2; i++)
    if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
      {
	rtx ref = XEXP (XEXP (set, 1), i);
	if (LABEL_NUSES (XEXP (ref, 0)) == 1)
	  delete_insn (XEXP (ref, 0));
	XEXP (ref, 0) = label;
	LABEL_NUSES (label) ++;
	JUMP_LABEL (insn) = label;
      }
  /* The modified insn must still recognize.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  gcc_assert (i >= 0);
}
/* Convert backward conditional (or unconditional) branches into
   "erepeat" hardware loops where safe.  For each invertible jump we
   scan backwards looking for its own target label; if we reach it
   without crossing a call, barrier, or unsafe label, the span is a
   loop we can wrap in erepeat/erepeat_end.  */

static void
mep_reorg_erepeat (rtx insns)
{
  rtx insn, prev, l, x;
  int count;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	&& ! JUMP_TABLE_DATA_P (insn)
	&& mep_invertable_branch_p (insn))
      {
	if (dump_file)
	  {
	    fprintf (dump_file, "\n------------------------------\n");
	    fprintf (dump_file, "erepeat: considering this jump:\n");
	    print_rtl_single (dump_file, insn);
	  }
	/* The conditional jump itself occupies a slot in the loop;
	   a simplejump will be deleted, so it does not.  */
	count = simplejump_p (insn) ? 0 : 1;
	/* Walk backwards from the jump toward its target label.  */
	for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
	  {
	    /* Calls and barriers end the candidate region.  */
	    if (GET_CODE (prev) == CALL_INSN
		|| BARRIER_P (prev))
	      break;

	    if (prev == JUMP_LABEL (insn))
	      {
		rtx newlast;
		if (dump_file)
		  fprintf (dump_file, "found loop top, %d insns\n", count);

		if (LABEL_NUSES (prev) == 1)
		  /* We're the only user, always safe */ ;
		else if (LABEL_NUSES (prev) == 2)
		  {
		    /* See if there's a barrier before this label.  If
		       so, we know nobody inside the loop uses it.
		       But we must be careful to put the erepeat
		       *after* the label.  */
		    rtx barrier;
		    for (barrier = PREV_INSN (prev);
			 barrier && GET_CODE (barrier) == NOTE;
			 barrier = PREV_INSN (barrier))
		      ;
		    if (barrier && GET_CODE (barrier) != BARRIER)
		      break;
		  }
		else
		  {
		    /* We don't know who else, within or without our loop, uses this */
		    if (dump_file)
		      fprintf (dump_file, "... but there are multiple users, too risky.\n");
		    break;
		  }

		/* Generate a label to be used by the erepat insn.  */
		l = gen_label_rtx ();

		/* Insert the erepeat after INSN's target label.  */
		x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
		LABEL_NUSES (l)++;
		emit_insn_after (x, prev);

		/* Insert the erepeat label.  */
		newlast = (mep_insert_repeat_label_last
			   (insn, l, !simplejump_p (insn), false));
		if (simplejump_p (insn))
		  {
		    /* An unconditional loop branch is subsumed by the
		       erepeat and can simply be deleted.  */
		    emit_insn_before (gen_erepeat_end (), insn);
		    delete_insn (insn);
		  }
		else
		  {
		    /* A conditional branch becomes the loop-exit test:
		       invert it so it jumps *out* past the new label.  */
		    mep_invert_branch (insn, newlast);
		    emit_insn_after (gen_erepeat_end (), newlast);
		  }
		break;
	      }

	    if (LABEL_P (prev))
	      {
		/* A label is OK if there is exactly one user, and we
		   can find that user before the next label.  */
		rtx user = 0;
		int safe = 0;
		if (LABEL_NUSES (prev) == 1)
		  {
		    for (user = PREV_INSN (prev);
			 user && (INSN_P (user) || GET_CODE (user) == NOTE);
			 user = PREV_INSN (user))
		      if (GET_CODE (user) == JUMP_INSN
			  && JUMP_LABEL (user) == prev)
			{
			  safe = INSN_UID (user);
			  break;
			}
		  }
		if (!safe)
		  break;
		if (dump_file)
		  fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
			   safe, INSN_UID (prev));
	      }

	    if (INSN_P (prev))
	      {
		count ++;
	      }
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n==============================\n");
}
5619 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5620 always do this on its own. */
/* Replace a jump to a return, with a copy of the return.  GCC doesn't
   always do this on its own.  */

static void
mep_jmp_return_reorg (rtx insns)
{
  rtx insn, label, ret;
  int ret_code;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (simplejump_p (insn))
      {
	/* Find the first real insn the jump jumps to.  */
	label = ret = JUMP_LABEL (insn);
	while (ret
	       && (GET_CODE (ret) == NOTE
		   || GET_CODE (ret) == CODE_LABEL
		   || GET_CODE (PATTERN (ret)) == USE))
	  ret = NEXT_INSN (ret);

	if (ret)
	  {
	    /* Is it a return?  */
	    ret_code = recog_memoized (ret);
	    if (ret_code == CODE_FOR_return_internal
		|| ret_code == CODE_FOR_eh_return_internal)
	      {
		/* It is.  Replace the jump with a return, dropping the
		   target label if this jump was its last user.  */
		LABEL_NUSES (label) --;
		if (LABEL_NUSES (label) == 0)
		  delete_insn (label);
		PATTERN (insn) = copy_rtx (PATTERN (ret));
		/* Force re-recognition of the rewritten insn.  */
		INSN_CODE (insn) = -1;
	      }
	  }
      }
}
/* Combine two adjacent "reg = reg + const" (addsi3) insns that target
   the same register into a single add of the summed constant, when the
   sum still fits the signed 16-bit immediate range.  The second insn
   is unlinked from the chain by hand (NEXT_INSN/PREV_INSN splicing).  */

static void
mep_reorg_addcombine (rtx insns)
{
  rtx i, n;

  for (i = insns; i; i = NEXT_INSN (i))
    if (INSN_P (i)
	&& INSN_CODE (i) == CODE_FOR_addsi3
	&& GET_CODE (SET_DEST (PATTERN (i))) == REG
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
	&& REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
      {
	n = NEXT_INSN (i);
	if (INSN_P (n)
	    && INSN_CODE (n) == CODE_FOR_addsi3
	    && GET_CODE (SET_DEST (PATTERN (n))) == REG
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
	    && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
	  {
	    int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
	    int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
	    /* Range check is conservative: 32767 itself is excluded,
	       presumably deliberately — confirm against the addsi3
	       immediate constraint before widening.  */
	    if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
		&& ic + nc < 32767
		&& ic + nc > -32768)
	      {
		XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
		/* Splice insn N out of the chain.  */
		NEXT_INSN (i) = NEXT_INSN (n);
		if (NEXT_INSN (i))
		  PREV_INSN (NEXT_INSN (i)) = i;
	      }
	  }
      }
}
5694 /* If this insn adjusts the stack, return the adjustment, else return
5695 zero. */
5696 static int
5697 add_sp_insn_p (rtx insn)
5699 rtx pat;
5701 if (! single_set (insn))
5702 return 0;
5703 pat = PATTERN (insn);
5704 if (GET_CODE (SET_DEST (pat)) != REG)
5705 return 0;
5706 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5707 return 0;
5708 if (GET_CODE (SET_SRC (pat)) != PLUS)
5709 return 0;
5710 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5711 return 0;
5712 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5713 return 0;
5714 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5715 return 0;
5716 return INTVAL (XEXP (SET_SRC (pat), 1));
5719 /* Check for trivial functions that set up an unneeded stack
5720 frame. */
/* Check for trivial functions that set up an unneeded stack
   frame.  If the function consists of "$sp += N", a body that never
   touches $sp or makes a call, and a matching "$sp -= N", delete the
   two adjustment insns.  */

static void
mep_reorg_noframe (rtx insns)
{
  rtx start_frame_insn;
  rtx end_frame_insn = 0;
  int sp_adjust, sp2;
  rtx sp;

  /* The first insn should be $sp = $sp + N */
  while (insns && ! INSN_P (insns))
    insns = NEXT_INSN (insns);
  if (!insns)
    return;

  sp_adjust = add_sp_insn_p (insns);
  if (sp_adjust == 0)
    return;

  start_frame_insn = insns;
  sp = SET_DEST (PATTERN (start_frame_insn));

  insns = next_real_insn (insns);

  /* Scan the body.  Bail out on any second $sp adjustment, any use of
     $sp, or any call; the final real insn (NEXT == NULL) is skipped.  */
  while (insns)
    {
      rtx next = next_real_insn (insns);
      if (!next)
	break;

      sp2 = add_sp_insn_p (insns);
      if (sp2)
	{
	  if (end_frame_insn)
	    return;
	  end_frame_insn = insns;
	  /* The closing adjustment must exactly undo the opening one.  */
	  if (sp2 != -sp_adjust)
	    return;
	}
      else if (mep_mentioned_p (insns, sp, 0))
	return;
      else if (CALL_P (insns))
	return;

      insns = next;
    }

  if (end_frame_insn)
    {
      delete_insn (start_frame_insn);
      delete_insn (end_frame_insn);
    }
}
/* The machine-dependent reorg pass entry point.  The sub-pass order is
   load-bearing: regmove runs before bundling (it deletes insns),
   repeat conversion runs after bundling, and noframe runs last because
   it may delete the first insn of the stream.  */

static void
mep_reorg (void)
{
  rtx insns = get_insns ();

  /* We require accurate REG_DEAD notes.  */
  compute_bb_for_insn ();
  df_note_add_problem ();
  df_analyze ();

  mep_reorg_addcombine (insns);
#if EXPERIMENTAL_REGMOVE_REORG
  /* VLIW packing has been done already, so we can't just delete things.  */
  if (!mep_vliw_function_p (cfun->decl))
    mep_reorg_regmove (insns);
#endif
  mep_jmp_return_reorg (insns);
  mep_bundle_insns (insns);
  mep_reorg_repeat (insns);
  /* erepeat uses the RPB/RPE/RPC registers; only safe when profiling
     is off and, in interrupt handlers, when RPB is saved/restored.  */
  if (optimize
      && !profile_flag
      && !profile_arc_flag
      && TARGET_OPT_REPEAT
      && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
    mep_reorg_erepeat (insns);

  /* This may delete *insns so make sure it's last.  */
  mep_reorg_noframe (insns);

  df_finish_pass (false);
}
5808 /*----------------------------------------------------------------------*/
5809 /* Builtins */
5810 /*----------------------------------------------------------------------*/
5812 /* Element X gives the index into cgen_insns[] of the most general
5813 implementation of intrinsic X. Unimplemented intrinsics are
5814 mapped to -1. */
/* Element X gives the index into cgen_insns[] of the most general
   implementation of intrinsic X.  Unimplemented intrinsics are
   mapped to -1.  */
int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

/* Element X gives the index of another instruction that is mapped to
   the same intrinsic as cgen_insns[X].  It is -1 when there is no other
   instruction.

   Things are set up so that mep_intrinsic_chain[X] < X.  */
static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

/* The bitmask for the current ISA.  The ISA masks are declared
   in mep-intrin.h.  */
unsigned int mep_selected_isa;

/* One -mconfig= name and its corresponding ISA bitmask.  */
struct mep_config {
  const char *config_name;
  unsigned int isa;
};

/* Table of known configurations, terminated by a null entry.  The
   real entries come from the generated COPROC_SELECTION_TABLE.  */
static struct mep_config mep_configs[] = {
#ifdef COPROC_SELECTION_TABLE
  COPROC_SELECTION_TABLE,
#endif
  { 0, 0 }
};
5840 /* Initialize the global intrinsics variables above. */
/* Initialize the global intrinsics variables above.  */

static void
mep_init_intrinsics (void)
{
  size_t i;

  /* Set MEP_SELECTED_ISA to the ISA flag for this configuration.  */
  mep_selected_isa = mep_configs[0].isa;
  if (mep_config_string != 0)
    for (i = 0; mep_configs[i].config_name; i++)
      if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
	{
	  mep_selected_isa = mep_configs[i].isa;
	  break;
	}

  /* Assume all intrinsics are unavailable.  */
  for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
    mep_intrinsic_insn[i] = -1;

  /* Build up the global intrinsic tables.  Later table entries
     overwrite mep_intrinsic_insn[] and chain the older entry, which
     preserves the mep_intrinsic_chain[X] < X invariant.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].isas & mep_selected_isa) != 0)
      {
	mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
	mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
      }

  /* See whether we can directly move values between one coprocessor
     register and another.  */
  for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
    if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
      mep_have_copro_copro_moves_p = true;

  /* See whether we can directly move values between core and
     coprocessor registers.  */
  mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
				 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));

  /* NOTE(review): this unconditionally clobbers the value computed
     just above, making the cmov1/cmov2 availability check dead code.
     It looks like a leftover that forces core<->copro moves on —
     confirm intent before removing either statement.  */
  mep_have_core_copro_moves_p = 1;
}
5882 /* Declare all available intrinsic functions. Called once only. */
/* Cached type nodes for the coprocessor builtin types; created and
   registered with the front end in mep_init_builtins.  */
static tree cp_data_bus_int_type_node;
static tree opaque_vector_type_node;
static tree v8qi_type_node;
static tree v4hi_type_node;
static tree v2si_type_node;
static tree v8uqi_type_node;
static tree v4uhi_type_node;
static tree v2usi_type_node;
/* Map a cgen regnum operand type onto the tree type node used to
   declare the corresponding builtin's parameter or return value.
   Unknown operand types map to void.  */

static tree
mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
{
  switch (cr)
    {
    case cgen_regnum_operand_type_POINTER:	return ptr_type_node;
    case cgen_regnum_operand_type_LONG:		return long_integer_type_node;
    case cgen_regnum_operand_type_ULONG:	return long_unsigned_type_node;
    case cgen_regnum_operand_type_SHORT:	return short_integer_type_node;
    case cgen_regnum_operand_type_USHORT:	return short_unsigned_type_node;
    case cgen_regnum_operand_type_CHAR:		return char_type_node;
    case cgen_regnum_operand_type_UCHAR:	return unsigned_char_type_node;
    case cgen_regnum_operand_type_SI:		return intSI_type_node;
    case cgen_regnum_operand_type_DI:		return intDI_type_node;
    case cgen_regnum_operand_type_VECTOR:	return opaque_vector_type_node;
    case cgen_regnum_operand_type_V8QI:		return v8qi_type_node;
    case cgen_regnum_operand_type_V4HI:		return v4hi_type_node;
    case cgen_regnum_operand_type_V2SI:		return v2si_type_node;
    case cgen_regnum_operand_type_V8UQI:	return v8uqi_type_node;
    case cgen_regnum_operand_type_V4UHI:	return v4uhi_type_node;
    case cgen_regnum_operand_type_V2USI:	return v2usi_type_node;
    case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
    default:
      return void_type_node;
    }
}
/* Implement the builtin-initialization hook: register the coprocessor
   types and declare one builtin function per available intrinsic.
   Called once only.  */

static void
mep_init_builtins (void)
{
  size_t i;

  /* The data-bus integer type is 64 bits wide when the coprocessor
     registers are.  */
  if (TARGET_64BIT_CR_REGS)
    cp_data_bus_int_type_node = long_long_integer_type_node;
  else
    cp_data_bus_int_type_node = long_integer_type_node;

  opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
  v8qi_type_node = build_vector_type (intQI_type_node, 8);
  v4hi_type_node = build_vector_type (intHI_type_node, 4);
  v2si_type_node = build_vector_type (intSI_type_node, 2);
  v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
  v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
  v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);

  add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node);

  add_builtin_type ("cp_vector", opaque_vector_type_node);

  add_builtin_type ("cp_v8qi", v8qi_type_node);
  add_builtin_type ("cp_v4hi", v4hi_type_node);
  add_builtin_type ("cp_v2si", v2si_type_node);

  add_builtin_type ("cp_v8uqi", v8uqi_type_node);
  add_builtin_type ("cp_v4uhi", v4uhi_type_node);
  add_builtin_type ("cp_v2usi", v2usi_type_node);

  /* Intrinsics like mep_cadd3 are implemented with two groups of
     instructions, one which uses UNSPECs and one which uses a specific
     rtl code such as PLUS.  Instructions in the latter group belong
     to GROUP_KNOWN_CODE.

     In such cases, the intrinsic will have two entries in the global
     tables above.  The unspec form is accessed using builtin functions
     while the specific form is accessed using the mep_* enum in
     mep-intrin.h.

     The idea is that __cop arithmetic and builtin functions have
     different optimization requirements.  If mep_cadd3() appears in
     the source code, the user will surely expect gcc to use cadd3
     rather than a work-alike such as add3.  However, if the user
     just writes "a + b", where a or b are __cop variables, it is
     reasonable for gcc to choose a core instruction rather than
     cadd3 if it believes that is more optimal.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
	&& mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
      {
	tree ret_type = void_type_node;
	tree bi_type;

	/* Declare each intrinsic once, even when several insns
	   implement it (table entries are grouped by intrinsic).  */
	if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
	  continue;

	if (cgen_insns[i].cret_p)
	  ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);

	bi_type = build_function_type_list (ret_type, NULL_TREE);
	add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
			      bi_type,
			      cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
      }
}
5987 /* Report the unavailablity of the given intrinsic. */
5989 #if 1
/* Report the unavailability of the given intrinsic, at most once per
   intrinsic per compilation (tracked in a static table).  */

static void
mep_intrinsic_unavailable (int intrinsic)
{
  static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];

  if (already_reported_p[intrinsic])
    return;

  /* Pick the diagnostic that explains *why* it is unavailable: not in
     this -mconfig at all, or only in the other (VLIW/core) mode.  */
  if (mep_intrinsic_insn[intrinsic] < 0)
    error ("coprocessor intrinsic %qs is not available in this configuration",
	   cgen_intrinsics[intrinsic]);
  else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
    error ("%qs is not available in VLIW functions",
	   cgen_intrinsics[intrinsic]);
  else
    error ("%qs is not available in non-VLIW functions",
	   cgen_intrinsics[intrinsic]);

  already_reported_p[intrinsic] = 1;
}
6010 #endif
6013 /* See if any implementation of INTRINSIC is available to the
6014 current function. If so, store the most general implementation
6015 in *INSN_PTR and return true. Return false otherwise. */
6017 static bool
6018 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6020 int i;
6022 i = mep_intrinsic_insn[intrinsic];
6023 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6024 i = mep_intrinsic_chain[i];
6026 if (i >= 0)
6028 *insn_ptr = &cgen_insns[i];
6029 return true;
6031 return false;
6035 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6036 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6037 try using a work-alike instead. In this case, the returned insn
6038 may have three operands rather than two. */
6040 static bool
6041 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6043 size_t i;
6045 if (intrinsic == mep_cmov)
6047 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6048 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6049 return true;
6050 return false;
6052 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6056 /* If ARG is a register operand that is the same size as MODE, convert it
6057 to MODE using a subreg. Otherwise return ARG as-is. */
6059 static rtx
6060 mep_convert_arg (enum machine_mode mode, rtx arg)
6062 if (GET_MODE (arg) != mode
6063 && register_operand (arg, VOIDmode)
6064 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6065 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6066 return arg;
6070 /* Apply regnum conversions to ARG using the description given by REGNUM.
6071 Return the new argument on success and null on failure. */
6073 static rtx
6074 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6076 if (regnum->count == 0)
6077 return arg;
6079 if (GET_CODE (arg) != CONST_INT
6080 || INTVAL (arg) < 0
6081 || INTVAL (arg) >= regnum->count)
6082 return 0;
6084 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6088 /* Try to make intrinsic argument ARG match the given operand.
6089 UNSIGNED_P is true if the argument has an unsigned type. */
6091 static rtx
6092 mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
6093 int unsigned_p)
6095 if (GET_CODE (arg) == CONST_INT)
6097 /* CONST_INTs can only be bound to integer operands. */
6098 if (GET_MODE_CLASS (operand->mode) != MODE_INT)
6099 return 0;
6101 else if (GET_CODE (arg) == CONST_DOUBLE)
6102 /* These hold vector constants. */;
6103 else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
6105 /* If the argument is a different size from what's expected, we must
6106 have a value in the right mode class in order to convert it. */
6107 if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
6108 return 0;
6110 /* If the operand is an rvalue, promote or demote it to match the
6111 operand's size. This might not need extra instructions when
6112 ARG is a register value. */
6113 if (operand->constraint[0] != '=')
6114 arg = convert_to_mode (operand->mode, arg, unsigned_p);
6117 /* If the operand is an lvalue, bind the operand to a new register.
6118 The caller will copy this value into ARG after the main
6119 instruction. By doing this always, we produce slightly more
6120 optimal code. */
6121 /* But not for control registers. */
6122 if (operand->constraint[0] == '='
6123 && (! REG_P (arg)
6124 || ! (CONTROL_REGNO_P (REGNO (arg))
6125 || CCR_REGNO_P (REGNO (arg))
6126 || CR_REGNO_P (REGNO (arg)))
6128 return gen_reg_rtx (operand->mode);
6130 /* Try simple mode punning. */
6131 arg = mep_convert_arg (operand->mode, arg);
6132 if (operand->predicate (arg, operand->mode))
6133 return arg;
6135 /* See if forcing the argument into a register will make it match. */
6136 if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
6137 arg = force_reg (operand->mode, arg);
6138 else
6139 arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
6140 if (operand->predicate (arg, operand->mode))
6141 return arg;
6143 return 0;
6147 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6148 function FNNAME. OPERAND describes the operand to which ARGNUM
6149 is mapped. */
6151 static void
6152 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6153 int argnum, tree fnname)
6155 size_t i;
6157 if (GET_CODE (arg) == CONST_INT)
6158 for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6159 if (operand->predicate == cgen_immediate_predicates[i].predicate)
6161 const struct cgen_immediate_predicate *predicate;
6162 HOST_WIDE_INT argval;
6164 predicate = &cgen_immediate_predicates[i];
6165 argval = INTVAL (arg);
6166 if (argval < predicate->lower || argval >= predicate->upper)
6167 error ("argument %d of %qE must be in the range %d...%d",
6168 argnum, fnname, predicate->lower, predicate->upper - 1);
6169 else
6170 error ("argument %d of %qE must be a multiple of %d",
6171 argnum, fnname, predicate->align);
6172 return;
6175 error ("incompatible type for argument %d of %qE", argnum, fnname);
6178 static rtx
6179 mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6180 rtx subtarget ATTRIBUTE_UNUSED,
6181 enum machine_mode mode ATTRIBUTE_UNUSED,
6182 int ignore ATTRIBUTE_UNUSED)
6184 rtx pat, op[10], arg[10];
6185 unsigned int a;
6186 int opindex, unsigned_p[10];
6187 tree fndecl, args;
6188 unsigned int n_args;
6189 tree fnname;
6190 const struct cgen_insn *cgen_insn;
6191 const struct insn_data_d *idata;
6192 unsigned int first_arg = 0;
6193 unsigned int builtin_n_args;
6195 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6196 fnname = DECL_NAME (fndecl);
6198 /* Find out which instruction we should emit. Note that some coprocessor
6199 intrinsics may only be available in VLIW mode, or only in normal mode. */
6200 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6202 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
6203 return NULL_RTX;
6205 idata = &insn_data[cgen_insn->icode];
6207 builtin_n_args = cgen_insn->num_args;
6209 if (cgen_insn->cret_p)
6211 if (cgen_insn->cret_p > 1)
6212 builtin_n_args ++;
6213 first_arg = 1;
6214 mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
6215 builtin_n_args --;
6218 /* Evaluate each argument. */
6219 n_args = call_expr_nargs (exp);
6221 if (n_args < builtin_n_args)
6223 error ("too few arguments to %qE", fnname);
6224 return NULL_RTX;
6226 if (n_args > builtin_n_args)
6228 error ("too many arguments to %qE", fnname);
6229 return NULL_RTX;
6232 for (a = first_arg; a < builtin_n_args + first_arg; a++)
6234 tree value;
6236 args = CALL_EXPR_ARG (exp, a - first_arg);
6238 value = args;
6240 #if 0
6241 if (cgen_insn->regnums[a].reference_p)
6243 if (TREE_CODE (value) != ADDR_EXPR)
6245 debug_tree(value);
6246 error ("argument %d of %qE must be an address", a+1, fnname);
6247 return NULL_RTX;
6249 value = TREE_OPERAND (value, 0);
6251 #endif
6253 /* If the argument has been promoted to int, get the unpromoted
6254 value. This is necessary when sub-int memory values are bound
6255 to reference parameters. */
6256 if (TREE_CODE (value) == NOP_EXPR
6257 && TREE_TYPE (value) == integer_type_node
6258 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6259 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6260 < TYPE_PRECISION (TREE_TYPE (value))))
6261 value = TREE_OPERAND (value, 0);
6263 /* If the argument has been promoted to double, get the unpromoted
6264 SFmode value. This is necessary for FMAX support, for example. */
6265 if (TREE_CODE (value) == NOP_EXPR
6266 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6267 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6268 && TYPE_MODE (TREE_TYPE (value)) == DFmode
6269 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6270 value = TREE_OPERAND (value, 0);
6272 unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6273 arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
6274 arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6275 if (cgen_insn->regnums[a].reference_p)
6277 tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6278 enum machine_mode pointed_mode = TYPE_MODE (pointed_to);
6280 arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
6282 if (arg[a] == 0)
6284 error ("argument %d of %qE must be in the range %d...%d",
6285 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
6286 return NULL_RTX;
6290 for (a = 0; a < first_arg; a++)
6292 if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6293 arg[a] = target;
6294 else
6295 arg[a] = gen_reg_rtx (idata->operand[0].mode);
6298 /* Convert the arguments into a form suitable for the intrinsic.
6299 Report an error if this isn't possible. */
6300 for (opindex = 0; opindex < idata->n_operands; opindex++)
6302 a = cgen_insn->op_mapping[opindex];
6303 op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6304 arg[a], unsigned_p[a]);
6305 if (op[opindex] == 0)
6307 mep_incompatible_arg (&idata->operand[opindex],
6308 arg[a], a + 1 - first_arg, fnname);
6309 return NULL_RTX;
6313 /* Emit the instruction. */
6314 pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6315 op[5], op[6], op[7], op[8], op[9]);
6317 if (GET_CODE (pat) == SET
6318 && GET_CODE (SET_DEST (pat)) == PC
6319 && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6320 emit_jump_insn (pat);
6321 else
6322 emit_insn (pat);
6324 /* Copy lvalues back to their final locations. */
6325 for (opindex = 0; opindex < idata->n_operands; opindex++)
6326 if (idata->operand[opindex].constraint[0] == '=')
6328 a = cgen_insn->op_mapping[opindex];
6329 if (a >= first_arg)
6331 if (GET_MODE_CLASS (GET_MODE (arg[a]))
6332 != GET_MODE_CLASS (GET_MODE (op[opindex])))
6333 emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6334 op[opindex]));
6335 else
6337 /* First convert the operand to the right mode, then copy it
6338 into the destination. Doing the conversion as a separate
6339 step (rather than using convert_move) means that we can
6340 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6341 refer to the same register. */
6342 op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6343 op[opindex], unsigned_p[a]);
6344 if (!rtx_equal_p (arg[a], op[opindex]))
6345 emit_move_insn (arg[a], op[opindex]);
6350 if (first_arg > 0 && target && target != op[0])
6352 emit_move_insn (target, op[0]);
6355 return target;
6358 static bool
6359 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
6361 return false;
6364 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6365 a global register. */
6367 static int
6368 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
6370 int regno;
6371 rtx x = *loc;
6373 if (! x)
6374 return 0;
6376 switch (GET_CODE (x))
6378 case SUBREG:
6379 if (REG_P (SUBREG_REG (x)))
6381 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6382 && global_regs[subreg_regno (x)])
6383 return 1;
6384 return 0;
6386 break;
6388 case REG:
6389 regno = REGNO (x);
6390 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6391 return 1;
6392 return 0;
6394 case SCRATCH:
6395 case PC:
6396 case CC0:
6397 case CONST_INT:
6398 case CONST_DOUBLE:
6399 case CONST:
6400 case LABEL_REF:
6401 return 0;
6403 case CALL:
6404 /* A non-constant call might use a global register. */
6405 return 1;
6407 default:
6408 break;
6411 return 0;
6414 /* Returns nonzero if X mentions a global register. */
6416 static int
6417 global_reg_mentioned_p (rtx x)
6419 if (INSN_P (x))
6421 if (CALL_P (x))
6423 if (! RTL_CONST_OR_PURE_CALL_P (x))
6424 return 1;
6425 x = CALL_INSN_FUNCTION_USAGE (x);
6426 if (x == 0)
6427 return 0;
6429 else
6430 x = PATTERN (x);
6433 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6435 /* Scheduling hooks for VLIW mode.
6437 Conceptually this is very simple: we have a two-pack architecture
6438 that takes one core insn and one coprocessor insn to make up either
6439 a 32- or 64-bit instruction word (depending on the option bit set in
6440 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6441 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6442 and one 48-bit cop insn or two 32-bit core/cop insns.
6444 In practice, instruction selection will be a bear. Consider in
6445 VL64 mode the following insns
6447 add $1, 1
6448 cmov $cr0, $0
6450 these cannot pack, since the add is a 16-bit core insn and cmov
6451 is a 32-bit cop insn. However,
6453 add3 $1, $1, 1
6454 cmov $cr0, $0
6456 packs just fine. For good VLIW code generation in VL64 mode, we
6457 will have to have 32-bit alternatives for many of the common core
6458 insns. Not implemented. */
6460 static int
6461 mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
6463 int cost_specified;
6465 if (REG_NOTE_KIND (link) != 0)
6467 /* See whether INSN and DEP_INSN are intrinsics that set the same
6468 hard register. If so, it is more important to free up DEP_INSN
6469 than it is to free up INSN.
6471 Note that intrinsics like mep_mulr are handled differently from
6472 the equivalent mep.md patterns. In mep.md, if we don't care
6473 about the value of $lo and $hi, the pattern will just clobber
6474 the registers, not set them. Since clobbers don't count as
6475 output dependencies, it is often possible to reorder two mulrs,
6476 even after reload.
6478 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6479 so any pair of mep_mulr()s will be inter-dependent. We should
6480 therefore give the first mep_mulr() a higher priority. */
6481 if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
6482 && global_reg_mentioned_p (PATTERN (insn))
6483 && global_reg_mentioned_p (PATTERN (dep_insn)))
6484 return 1;
6486 /* If the dependence is an anti or output dependence, assume it
6487 has no cost. */
6488 return 0;
6491 /* If we can't recognize the insns, we can't really do anything. */
6492 if (recog_memoized (dep_insn) < 0)
6493 return cost;
6495 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6496 attribute instead. */
6497 if (!TARGET_H1)
6499 cost_specified = get_attr_latency (dep_insn);
6500 if (cost_specified != 0)
6501 return cost_specified;
6504 return cost;
6507 /* ??? We don't properly compute the length of a load/store insn,
6508 taking into account the addressing mode. */
6510 static int
6511 mep_issue_rate (void)
6513 return TARGET_IVC2 ? 3 : 2;
6516 /* Return true if function DECL was declared with the vliw attribute. */
6518 bool
6519 mep_vliw_function_p (tree decl)
6521 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6524 static rtx
6525 mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6527 int i;
6529 for (i = nready - 1; i >= 0; --i)
6531 rtx insn = ready[i];
6532 if (recog_memoized (insn) >= 0
6533 && get_attr_slot (insn) == slot
6534 && get_attr_length (insn) == length)
6535 return insn;
6538 return NULL_RTX;
6541 static void
6542 mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6544 int i;
6546 for (i = 0; i < nready; ++i)
6547 if (ready[i] == insn)
6549 for (; i < nready - 1; ++i)
6550 ready[i] = ready[i + 1];
6551 ready[i] = insn;
6552 return;
6555 gcc_unreachable ();
6558 static void
6559 mep_print_sched_insn (FILE *dump, rtx insn)
6561 const char *slots = "none";
6562 const char *name = NULL;
6563 int code;
6564 char buf[30];
6566 if (GET_CODE (PATTERN (insn)) == SET
6567 || GET_CODE (PATTERN (insn)) == PARALLEL)
6569 switch (get_attr_slots (insn))
6571 case SLOTS_CORE: slots = "core"; break;
6572 case SLOTS_C3: slots = "c3"; break;
6573 case SLOTS_P0: slots = "p0"; break;
6574 case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6575 case SLOTS_P0_P1: slots = "p0,p1"; break;
6576 case SLOTS_P0S: slots = "p0s"; break;
6577 case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6578 case SLOTS_P1: slots = "p1"; break;
6579 default:
6580 sprintf(buf, "%d", get_attr_slots (insn));
6581 slots = buf;
6582 break;
6585 if (GET_CODE (PATTERN (insn)) == USE)
6586 slots = "use";
6588 code = INSN_CODE (insn);
6589 if (code >= 0)
6590 name = get_insn_name (code);
6591 if (!name)
6592 name = "{unknown}";
6594 fprintf (dump,
6595 "insn %4d %4d %8s %s\n",
6596 code,
6597 INSN_UID (insn),
6598 name,
6599 slots);
6602 static int
6603 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6604 int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
6605 int *pnready, int clock ATTRIBUTE_UNUSED)
6607 int nready = *pnready;
6608 rtx core_insn, cop_insn;
6609 int i;
6611 if (dump && sched_verbose > 1)
6613 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6614 for (i=0; i<nready; i++)
6615 mep_print_sched_insn (dump, ready[i]);
6616 fprintf (dump, "\n");
6619 if (!mep_vliw_function_p (cfun->decl))
6620 return 1;
6621 if (nready < 2)
6622 return 1;
6624 /* IVC2 uses a DFA to determine what's ready and what's not. */
6625 if (TARGET_IVC2)
6626 return nready;
6628 /* We can issue either a core or coprocessor instruction.
6629 Look for a matched pair of insns to reorder. If we don't
6630 find any, don't second-guess the scheduler's priorities. */
6632 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6633 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6634 TARGET_OPT_VL64 ? 6 : 2)))
6636 else if (TARGET_OPT_VL64
6637 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6638 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6640 else
6641 /* We didn't find a pair. Issue the single insn at the head
6642 of the ready list. */
6643 return 1;
6645 /* Reorder the two insns first. */
6646 mep_move_ready_insn (ready, nready, core_insn);
6647 mep_move_ready_insn (ready, nready - 1, cop_insn);
6648 return 2;
6651 /* A for_each_rtx callback. Return true if *X is a register that is
6652 set by insn PREV. */
6654 static int
6655 mep_store_find_set (rtx *x, void *prev)
6657 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6660 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6661 not the containing insn. */
6663 static bool
6664 mep_store_data_bypass_1 (rtx prev, rtx pat)
6666 /* Cope with intrinsics like swcpa. */
6667 if (GET_CODE (pat) == PARALLEL)
6669 int i;
6671 for (i = 0; i < XVECLEN (pat, 0); i++)
6672 if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6673 return true;
6675 return false;
6678 /* Check for some sort of store. */
6679 if (GET_CODE (pat) != SET
6680 || GET_CODE (SET_DEST (pat)) != MEM)
6681 return false;
6683 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6684 The first operand to the unspec is the store data and the other operands
6685 are used to calculate the address. */
6686 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6688 rtx src;
6689 int i;
6691 src = SET_SRC (pat);
6692 for (i = 1; i < XVECLEN (src, 0); i++)
6693 if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6694 return false;
6696 return true;
6699 /* Otherwise just check that PREV doesn't modify any register mentioned
6700 in the memory destination. */
6701 return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6704 /* Return true if INSN is a store instruction and if the store address
6705 has no true dependence on PREV. */
6707 bool
6708 mep_store_data_bypass_p (rtx prev, rtx insn)
6710 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6713 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6714 is a register other than LO or HI and if PREV sets *X. */
6716 static int
6717 mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6719 return (REG_P (*x)
6720 && REGNO (*x) != LO_REGNO
6721 && REGNO (*x) != HI_REGNO
6722 && reg_set_p (*x, (const_rtx) prev));
6725 /* Return true if, apart from HI/LO, there are no true dependencies
6726 between multiplication instructions PREV and INSN. */
6728 bool
6729 mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6731 rtx pat;
6733 pat = PATTERN (insn);
6734 if (GET_CODE (pat) == PARALLEL)
6735 pat = XVECEXP (pat, 0, 0);
6736 return (GET_CODE (pat) == SET
6737 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6740 /* Return true if INSN is an ldc instruction that issues to the
6741 MeP-h1 integer pipeline. This is true for instructions that
6742 read from PSW, LP, SAR, HI and LO. */
6744 bool
6745 mep_ipipe_ldc_p (rtx insn)
6747 rtx pat, src;
6749 pat = PATTERN (insn);
6751 /* Cope with instrinsics that set both a hard register and its shadow.
6752 The set of the hard register comes first. */
6753 if (GET_CODE (pat) == PARALLEL)
6754 pat = XVECEXP (pat, 0, 0);
6756 if (GET_CODE (pat) == SET)
6758 src = SET_SRC (pat);
6760 /* Cope with intrinsics. The first operand to the unspec is
6761 the source register. */
6762 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6763 src = XVECEXP (src, 0, 0);
6765 if (REG_P (src))
6766 switch (REGNO (src))
6768 case PSW_REGNO:
6769 case LP_REGNO:
6770 case SAR_REGNO:
6771 case HI_REGNO:
6772 case LO_REGNO:
6773 return true;
6776 return false;
6779 /* Create a VLIW bundle from core instruction CORE and coprocessor
6780 instruction COP. COP always satisfies INSN_P, but CORE can be
6781 either a new pattern or an existing instruction.
6783 Emit the bundle in place of COP and return it. */
6785 static rtx
6786 mep_make_bundle (rtx core, rtx cop)
6788 rtx insn;
6790 /* If CORE is an existing instruction, remove it, otherwise put
6791 the new pattern in an INSN harness. */
6792 if (INSN_P (core))
6793 remove_insn (core);
6794 else
6795 core = make_insn_raw (core);
6797 /* Generate the bundle sequence and replace COP with it. */
6798 insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
6799 insn = emit_insn_after (insn, cop);
6800 remove_insn (cop);
6802 /* Set up the links of the insns inside the SEQUENCE. */
6803 PREV_INSN (core) = PREV_INSN (insn);
6804 NEXT_INSN (core) = cop;
6805 PREV_INSN (cop) = core;
6806 NEXT_INSN (cop) = NEXT_INSN (insn);
6808 /* Set the VLIW flag for the coprocessor instruction. */
6809 PUT_MODE (core, VOIDmode);
6810 PUT_MODE (cop, BImode);
6812 /* Derive a location for the bundle. Individual instructions cannot
6813 have their own location because there can be no assembler labels
6814 between CORE and COP. */
6815 INSN_LOCATION (insn) = INSN_LOCATION (INSN_LOCATION (core) ? core : cop);
6816 INSN_LOCATION (core) = 0;
6817 INSN_LOCATION (cop) = 0;
6819 return insn;
6822 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6824 static void
6825 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6827 rtx * pinsn = (rtx *) data;
6829 if (*pinsn && reg_mentioned_p (x, *pinsn))
6830 *pinsn = NULL_RTX;
6833 /* Return true if anything in insn X is (anti,output,true) dependent on
6834 anything in insn Y. */
6836 static int
6837 mep_insn_dependent_p (rtx x, rtx y)
6839 rtx tmp;
6841 gcc_assert (INSN_P (x));
6842 gcc_assert (INSN_P (y));
6844 tmp = PATTERN (y);
6845 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6846 if (tmp == NULL_RTX)
6847 return 1;
6849 tmp = PATTERN (x);
6850 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6851 if (tmp == NULL_RTX)
6852 return 1;
6854 return 0;
6857 static int
6858 core_insn_p (rtx insn)
6860 if (GET_CODE (PATTERN (insn)) == USE)
6861 return 0;
6862 if (get_attr_slot (insn) == SLOT_CORE)
6863 return 1;
6864 return 0;
6867 /* Mark coprocessor instructions that can be bundled together with
6868 the immediately preceding core instruction. This is later used
6869 to emit the "+" that tells the assembler to create a VLIW insn.
6871 For unbundled insns, the assembler will automatically add coprocessor
6872 nops, and 16-bit core nops. Due to an apparent oversight in the
6873 spec, the assembler will _not_ automatically add 32-bit core nops,
6874 so we have to emit those here.
6876 Called from mep_insn_reorg. */
6878 static void
6879 mep_bundle_insns (rtx insns)
6881 rtx insn, last = NULL_RTX, first = NULL_RTX;
6882 int saw_scheduling = 0;
6884 /* Only do bundling if we're in vliw mode. */
6885 if (!mep_vliw_function_p (cfun->decl))
6886 return;
6888 /* The first insn in a bundle are TImode, the remainder are
6889 VOIDmode. After this function, the first has VOIDmode and the
6890 rest have BImode. */
6892 /* Note: this doesn't appear to be true for JUMP_INSNs. */
6894 /* First, move any NOTEs that are within a bundle, to the beginning
6895 of the bundle. */
6896 for (insn = insns; insn ; insn = NEXT_INSN (insn))
6898 if (NOTE_P (insn) && first)
6899 /* Don't clear FIRST. */;
6901 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
6902 first = insn;
6904 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
6906 rtx note, prev;
6908 /* INSN is part of a bundle; FIRST is the first insn in that
6909 bundle. Move all intervening notes out of the bundle.
6910 In addition, since the debug pass may insert a label
6911 whenever the current line changes, set the location info
6912 for INSN to match FIRST. */
6914 INSN_LOCATION (insn) = INSN_LOCATION (first);
6916 note = PREV_INSN (insn);
6917 while (note && note != first)
6919 prev = PREV_INSN (note);
6921 if (NOTE_P (note))
6923 /* Remove NOTE from here... */
6924 PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
6925 NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
6926 /* ...and put it in here. */
6927 NEXT_INSN (note) = first;
6928 PREV_INSN (note) = PREV_INSN (first);
6929 NEXT_INSN (PREV_INSN (note)) = note;
6930 PREV_INSN (NEXT_INSN (note)) = note;
6933 note = prev;
6937 else if (!NONJUMP_INSN_P (insn))
6938 first = 0;
6941 /* Now fix up the bundles. */
6942 for (insn = insns; insn ; insn = NEXT_INSN (insn))
6944 if (NOTE_P (insn))
6945 continue;
6947 if (!NONJUMP_INSN_P (insn))
6949 last = 0;
6950 continue;
6953 /* If we're not optimizing enough, there won't be scheduling
6954 info. We detect that here. */
6955 if (GET_MODE (insn) == TImode)
6956 saw_scheduling = 1;
6957 if (!saw_scheduling)
6958 continue;
6960 if (TARGET_IVC2)
6962 rtx core_insn = NULL_RTX;
6964 /* IVC2 slots are scheduled by DFA, so we just accept
6965 whatever the scheduler gives us. However, we must make
6966 sure the core insn (if any) is the first in the bundle.
6967 The IVC2 assembler can insert whatever NOPs are needed,
6968 and allows a COP insn to be first. */
6970 if (NONJUMP_INSN_P (insn)
6971 && GET_CODE (PATTERN (insn)) != USE
6972 && GET_MODE (insn) == TImode)
6974 for (last = insn;
6975 NEXT_INSN (last)
6976 && GET_MODE (NEXT_INSN (last)) == VOIDmode
6977 && NONJUMP_INSN_P (NEXT_INSN (last));
6978 last = NEXT_INSN (last))
6980 if (core_insn_p (last))
6981 core_insn = last;
6983 if (core_insn_p (last))
6984 core_insn = last;
6986 if (core_insn && core_insn != insn)
6988 /* Swap core insn to first in the bundle. */
6990 /* Remove core insn. */
6991 if (PREV_INSN (core_insn))
6992 NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
6993 if (NEXT_INSN (core_insn))
6994 PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);
6996 /* Re-insert core insn. */
6997 PREV_INSN (core_insn) = PREV_INSN (insn);
6998 NEXT_INSN (core_insn) = insn;
7000 if (PREV_INSN (core_insn))
7001 NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
7002 PREV_INSN (insn) = core_insn;
7004 PUT_MODE (core_insn, TImode);
7005 PUT_MODE (insn, VOIDmode);
7009 /* The first insn has TImode, the rest have VOIDmode */
7010 if (GET_MODE (insn) == TImode)
7011 PUT_MODE (insn, VOIDmode);
7012 else
7013 PUT_MODE (insn, BImode);
7014 continue;
7017 PUT_MODE (insn, VOIDmode);
7018 if (recog_memoized (insn) >= 0
7019 && get_attr_slot (insn) == SLOT_COP)
7021 if (GET_CODE (insn) == JUMP_INSN
7022 || ! last
7023 || recog_memoized (last) < 0
7024 || get_attr_slot (last) != SLOT_CORE
7025 || (get_attr_length (insn)
7026 != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
7027 || mep_insn_dependent_p (insn, last))
7029 switch (get_attr_length (insn))
7031 case 8:
7032 break;
7033 case 6:
7034 insn = mep_make_bundle (gen_nop (), insn);
7035 break;
7036 case 4:
7037 if (TARGET_OPT_VL64)
7038 insn = mep_make_bundle (gen_nop32 (), insn);
7039 break;
7040 case 2:
7041 if (TARGET_OPT_VL64)
7042 error ("2 byte cop instructions are"
7043 " not allowed in 64-bit VLIW mode");
7044 else
7045 insn = mep_make_bundle (gen_nop (), insn);
7046 break;
7047 default:
7048 error ("unexpected %d byte cop instruction",
7049 get_attr_length (insn));
7050 break;
7053 else
7054 insn = mep_make_bundle (last, insn);
7057 last = insn;
7062 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7063 Return true on success. This function can fail if the intrinsic
7064 is unavailable or if the operands don't satisfy their predicates. */
7066 bool
7067 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7069 const struct cgen_insn *cgen_insn;
7070 const struct insn_data_d *idata;
7071 rtx newop[10];
7072 int i;
7074 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7075 return false;
7077 idata = &insn_data[cgen_insn->icode];
7078 for (i = 0; i < idata->n_operands; i++)
7080 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7081 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7082 return false;
7085 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7086 newop[3], newop[4], newop[5],
7087 newop[6], newop[7], newop[8]));
7089 return true;
7093 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7094 OPERANDS[0]. Report an error if the instruction could not
7095 be synthesized. OPERANDS[1] is a register_operand. For sign
7096 and zero extensions, it may be smaller than SImode. */
7098 bool
7099 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
7100 rtx * operands ATTRIBUTE_UNUSED)
7102 return false;
7106 /* Likewise, but apply a binary operation to OPERANDS[1] and
7107 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7108 can be a general_operand.
7110 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7111 third operand. REG and REG3 take register operands only. */
7113 bool
7114 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
7115 int ATTRIBUTE_UNUSED immediate3,
7116 int ATTRIBUTE_UNUSED reg,
7117 int ATTRIBUTE_UNUSED reg3,
7118 rtx * operands ATTRIBUTE_UNUSED)
7120 return false;
7123 static bool
7124 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
7125 int opno ATTRIBUTE_UNUSED, int *total,
7126 bool ATTRIBUTE_UNUSED speed_t)
7128 switch (code)
7130 case CONST_INT:
7131 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7132 *total = 0;
7133 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7134 *total = 1;
7135 else
7136 *total = 3;
7137 return true;
7139 case SYMBOL_REF:
7140 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7141 return true;
7143 case MULT:
7144 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7145 ? COSTS_N_INSNS (3)
7146 : COSTS_N_INSNS (2));
7147 return true;
7149 return false;
7152 static int
7153 mep_address_cost (rtx addr ATTRIBUTE_UNUSED,
7154 enum machine_mode mode ATTRIBUTE_UNUSED,
7155 addr_space_t as ATTRIBUTE_UNUSED,
7156 bool ATTRIBUTE_UNUSED speed_p)
7158 return 1;
7161 static void
7162 mep_asm_init_sections (void)
7164 based_section
7165 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7166 "\t.section .based,\"aw\"");
7168 tinybss_section
7169 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7170 "\t.section .sbss,\"aw\"");
7172 sdata_section
7173 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7174 "\t.section .sdata,\"aw\",@progbits");
7176 far_section
7177 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7178 "\t.section .far,\"aw\"");
7180 farbss_section
7181 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7182 "\t.section .farbss,\"aw\"");
7184 frodata_section
7185 = get_unnamed_section (0, output_section_asm_op,
7186 "\t.section .frodata,\"a\"");
7188 srodata_section
7189 = get_unnamed_section (0, output_section_asm_op,
7190 "\t.section .srodata,\"a\"");
7192 vtext_section
7193 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7194 "\t.section .vtext,\"axv\"\n\t.vliw");
7196 vftext_section
7197 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7198 "\t.section .vftext,\"axv\"\n\t.vliw");
7200 ftext_section
7201 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7202 "\t.section .ftext,\"ax\"\n\t.core");
7206 /* Initialize the GCC target structure. */
7208 #undef TARGET_ASM_FUNCTION_PROLOGUE
7209 #define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
7210 #undef TARGET_ATTRIBUTE_TABLE
7211 #define TARGET_ATTRIBUTE_TABLE mep_attribute_table
7212 #undef TARGET_COMP_TYPE_ATTRIBUTES
7213 #define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
7214 #undef TARGET_INSERT_ATTRIBUTES
7215 #define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
7216 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
7217 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
7218 #undef TARGET_CAN_INLINE_P
7219 #define TARGET_CAN_INLINE_P mep_can_inline_p
7220 #undef TARGET_SECTION_TYPE_FLAGS
7221 #define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
7222 #undef TARGET_ASM_NAMED_SECTION
7223 #define TARGET_ASM_NAMED_SECTION mep_asm_named_section
7224 #undef TARGET_INIT_BUILTINS
7225 #define TARGET_INIT_BUILTINS mep_init_builtins
7226 #undef TARGET_EXPAND_BUILTIN
7227 #define TARGET_EXPAND_BUILTIN mep_expand_builtin
7228 #undef TARGET_SCHED_ADJUST_COST
7229 #define TARGET_SCHED_ADJUST_COST mep_adjust_cost
7230 #undef TARGET_SCHED_ISSUE_RATE
7231 #define TARGET_SCHED_ISSUE_RATE mep_issue_rate
7232 #undef TARGET_SCHED_REORDER
7233 #define TARGET_SCHED_REORDER mep_sched_reorder
7234 #undef TARGET_STRIP_NAME_ENCODING
7235 #define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
7236 #undef TARGET_ASM_SELECT_SECTION
7237 #define TARGET_ASM_SELECT_SECTION mep_select_section
7238 #undef TARGET_ASM_UNIQUE_SECTION
7239 #define TARGET_ASM_UNIQUE_SECTION mep_unique_section
7240 #undef TARGET_ENCODE_SECTION_INFO
7241 #define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
7242 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
7243 #define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
7244 #undef TARGET_RTX_COSTS
7245 #define TARGET_RTX_COSTS mep_rtx_cost
7246 #undef TARGET_ADDRESS_COST
7247 #define TARGET_ADDRESS_COST mep_address_cost
7248 #undef TARGET_MACHINE_DEPENDENT_REORG
7249 #define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
7250 #undef TARGET_SETUP_INCOMING_VARARGS
7251 #define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
7252 #undef TARGET_PASS_BY_REFERENCE
7253 #define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
7254 #undef TARGET_FUNCTION_ARG
7255 #define TARGET_FUNCTION_ARG mep_function_arg
7256 #undef TARGET_FUNCTION_ARG_ADVANCE
7257 #define TARGET_FUNCTION_ARG_ADVANCE mep_function_arg_advance
7258 #undef TARGET_VECTOR_MODE_SUPPORTED_P
7259 #define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
7260 #undef TARGET_OPTION_OVERRIDE
7261 #define TARGET_OPTION_OVERRIDE mep_option_override
7262 #undef TARGET_ALLOCATE_INITIAL_VALUE
7263 #define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
7264 #undef TARGET_ASM_INIT_SECTIONS
7265 #define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
7266 #undef TARGET_RETURN_IN_MEMORY
7267 #define TARGET_RETURN_IN_MEMORY mep_return_in_memory
7268 #undef TARGET_NARROW_VOLATILE_BITFIELD
7269 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
7270 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
7271 #define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
7272 #undef TARGET_BUILD_BUILTIN_VA_LIST
7273 #define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
7274 #undef TARGET_EXPAND_BUILTIN_VA_START
7275 #define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
7276 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
7277 #define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
7278 #undef TARGET_CAN_ELIMINATE
7279 #define TARGET_CAN_ELIMINATE mep_can_eliminate
7280 #undef TARGET_CONDITIONAL_REGISTER_USAGE
7281 #define TARGET_CONDITIONAL_REGISTER_USAGE mep_conditional_register_usage
7282 #undef TARGET_TRAMPOLINE_INIT
7283 #define TARGET_TRAMPOLINE_INIT mep_trampoline_init
7284 #undef TARGET_LEGITIMATE_CONSTANT_P
7285 #define TARGET_LEGITIMATE_CONSTANT_P mep_legitimate_constant_p
7287 struct gcc_target targetm = TARGET_INITIALIZER;
7289 #include "gt-mep.h"