* tree.h (DECL_ONE_ONLY): Return true only for externally visible
[official-gcc.git] / gcc / config / mep / mep.c
blob107f1fa02d7aced83f56359ae6926fdd599a7199
1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "varasm.h"
28 #include "calls.h"
29 #include "stringpool.h"
30 #include "stor-layout.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
35 #include "insn-flags.h"
36 #include "output.h"
37 #include "insn-attr.h"
38 #include "flags.h"
39 #include "recog.h"
40 #include "obstack.h"
41 #include "tree.h"
42 #include "expr.h"
43 #include "except.h"
44 #include "function.h"
45 #include "optabs.h"
46 #include "reload.h"
47 #include "tm_p.h"
48 #include "ggc.h"
49 #include "diagnostic-core.h"
50 #include "target.h"
51 #include "target-def.h"
52 #include "langhooks.h"
53 #include "df.h"
54 #include "pointer-set.h"
55 #include "hash-table.h"
56 #include "vec.h"
57 #include "basic-block.h"
58 #include "tree-ssa-alias.h"
59 #include "internal-fn.h"
60 #include "gimple-fold.h"
61 #include "tree-eh.h"
62 #include "gimple-expr.h"
63 #include "is-a.h"
64 #include "gimple.h"
65 #include "gimplify.h"
66 #include "opts.h"
67 #include "dumpfile.h"
69 /* Structure of this file:
71 + Command Line Option Support
72 + Pattern support - constraints, predicates, expanders
73 + Reload Support
74 + Costs
75 + Functions to save and restore machine-specific function data.
76 + Frame/Epilog/Prolog Related
77 + Operand Printing
78 + Function args in registers
79 + Handle pipeline hazards
80 + Handle attributes
81 + Trampolines
82 + Machine-dependent Reorg
83 + Builtins. */
85 /* Symbol encodings:
87 Symbols are encoded as @ <char> . <name> where <char> is one of these:
89 b - based
90 t - tiny
91 n - near
92 f - far
93 i - io, near
94 I - io, far
95 c - cb (control bus) */
/* Per-function machine-specific state, reached via cfun->machine and
   allocated by mep_init_machine_status.  */
struct GTY(()) machine_function
{
  /* Cached frame-pointer decision for this function.  */
  int mep_frame_pointer_needed;

  /* For varargs. */
  int arg_regs_to_save;
  int regsave_filler;
  int frame_filler;
  int frame_locked;

  /* Records __builtin_return address. */
  rtx eh_stack_adjust;

  /* Size of the register-save area, plus the save slot assigned to
     each hard register and whether it is saved at all.  */
  int reg_save_size;
  int reg_save_slot[FIRST_PSEUDO_REGISTER];
  unsigned char reg_saved[FIRST_PSEUDO_REGISTER];

  /* 2 if the current function has an interrupt attribute, 1 if not, 0
     if unknown.  This is here because resource.c uses EPILOGUE_USES
     which needs it.  */
  int interrupt_handler;

  /* Likewise, for disinterrupt attribute.  */
  int disable_interrupts;

  /* Number of doloop tags used so far.  */
  int doloop_tags;

  /* True if the last tag was allocated to a doloop_end.  */
  bool doloop_tag_from_end;

  /* True if reload changes $TP.  */
  bool reload_changes_tp;

  /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
     We only set this if the function is an interrupt handler.  */
  int asms_without_operands;
};
136 #define MEP_CONTROL_REG(x) \
137 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
139 static GTY(()) section * based_section;
140 static GTY(()) section * tinybss_section;
141 static GTY(()) section * far_section;
142 static GTY(()) section * farbss_section;
143 static GTY(()) section * frodata_section;
144 static GTY(()) section * srodata_section;
146 static GTY(()) section * vtext_section;
147 static GTY(()) section * vftext_section;
148 static GTY(()) section * ftext_section;
150 static void mep_set_leaf_registers (int);
151 static bool symbol_p (rtx);
152 static bool symbolref_p (rtx);
153 static void encode_pattern_1 (rtx);
154 static void encode_pattern (rtx);
155 static bool const_in_range (rtx, int, int);
156 static void mep_rewrite_mult (rtx, rtx);
157 static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
158 static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
159 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
160 static bool move_needs_splitting (rtx, rtx, enum machine_mode);
161 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
162 static bool mep_nongeneral_reg (rtx);
163 static bool mep_general_copro_reg (rtx);
164 static bool mep_nonregister (rtx);
165 static struct machine_function* mep_init_machine_status (void);
166 static rtx mep_tp_rtx (void);
167 static rtx mep_gp_rtx (void);
168 static bool mep_interrupt_p (void);
169 static bool mep_disinterrupt_p (void);
170 static bool mep_reg_set_p (rtx, rtx);
171 static bool mep_reg_set_in_function (int);
172 static bool mep_interrupt_saved_reg (int);
173 static bool mep_call_saves_register (int);
174 static rtx F (rtx);
175 static void add_constant (int, int, int, int);
176 static rtx maybe_dead_move (rtx, rtx, bool);
177 static void mep_reload_pointer (int, const char *);
178 static void mep_start_function (FILE *, HOST_WIDE_INT);
179 static bool mep_function_ok_for_sibcall (tree, tree);
180 static int unique_bit_in (HOST_WIDE_INT);
181 static int bit_size_for_clip (HOST_WIDE_INT);
182 static int bytesize (const_tree, enum machine_mode);
183 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
184 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
185 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
186 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
187 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
188 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
189 static bool mep_function_attribute_inlinable_p (const_tree);
190 static bool mep_can_inline_p (tree, tree);
191 static bool mep_lookup_pragma_disinterrupt (const char *);
192 static int mep_multiple_address_regions (tree, bool);
193 static int mep_attrlist_to_encoding (tree, tree);
194 static void mep_insert_attributes (tree, tree *);
195 static void mep_encode_section_info (tree, rtx, int);
196 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
197 static void mep_unique_section (tree, int);
198 static unsigned int mep_section_type_flags (tree, const char *, int);
199 static void mep_asm_named_section (const char *, unsigned int, tree);
200 static bool mep_mentioned_p (rtx, rtx, int);
201 static void mep_reorg_regmove (rtx);
202 static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
203 static void mep_reorg_repeat (rtx);
204 static bool mep_invertable_branch_p (rtx);
205 static void mep_invert_branch (rtx, rtx);
206 static void mep_reorg_erepeat (rtx);
207 static void mep_jmp_return_reorg (rtx);
208 static void mep_reorg_addcombine (rtx);
209 static void mep_reorg (void);
210 static void mep_init_intrinsics (void);
211 static void mep_init_builtins (void);
212 static void mep_intrinsic_unavailable (int);
213 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
214 static bool mep_get_move_insn (int, const struct cgen_insn **);
215 static rtx mep_convert_arg (enum machine_mode, rtx);
216 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
217 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
218 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
219 static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
220 static int mep_adjust_cost (rtx, rtx, rtx, int);
221 static int mep_issue_rate (void);
222 static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
223 static void mep_move_ready_insn (rtx *, int, rtx);
224 static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
225 static rtx mep_make_bundle (rtx, rtx);
226 static void mep_bundle_insns (rtx);
227 static bool mep_rtx_cost (rtx, int, int, int, int *, bool);
228 static int mep_address_cost (rtx, enum machine_mode, addr_space_t, bool);
229 static void mep_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
230 tree, int *, int);
231 static bool mep_pass_by_reference (cumulative_args_t cum, enum machine_mode,
232 const_tree, bool);
233 static rtx mep_function_arg (cumulative_args_t, enum machine_mode,
234 const_tree, bool);
235 static void mep_function_arg_advance (cumulative_args_t, enum machine_mode,
236 const_tree, bool);
237 static bool mep_vector_mode_supported_p (enum machine_mode);
238 static rtx mep_allocate_initial_value (rtx);
239 static void mep_asm_init_sections (void);
240 static int mep_comp_type_attributes (const_tree, const_tree);
241 static bool mep_narrow_volatile_bitfield (void);
242 static rtx mep_expand_builtin_saveregs (void);
243 static tree mep_build_builtin_va_list (void);
244 static void mep_expand_va_start (tree, rtx);
245 static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
246 static bool mep_can_eliminate (const int, const int);
247 static void mep_conditional_register_usage (void);
248 static void mep_trampoline_init (rtx, tree, rtx);
250 #define WANT_GCC_DEFINITIONS
251 #include "mep-intrin.h"
252 #undef WANT_GCC_DEFINITIONS
255 /* Command Line Option Support. */
257 char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
259 /* True if we can use cmov instructions to move values back and forth
260 between core and coprocessor registers. */
261 bool mep_have_core_copro_moves_p;
263 /* True if we can use cmov instructions (or a work-alike) to move
264 values between coprocessor registers. */
265 bool mep_have_copro_copro_moves_p;
267 /* A table of all coprocessor instructions that can act like
268 a coprocessor-to-coprocessor cmov. */
269 static const int mep_cmov_insns[] = {
270 mep_cmov,
271 mep_cpmov,
272 mep_fmovs,
273 mep_caddi3,
274 mep_csubi3,
275 mep_candi3,
276 mep_cori3,
277 mep_cxori3,
278 mep_cand3,
279 mep_cor3
283 static void
284 mep_set_leaf_registers (int enable)
286 int i;
288 if (mep_leaf_registers[0] != enable)
289 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
290 mep_leaf_registers[i] = enable;
293 static void
294 mep_conditional_register_usage (void)
296 int i;
298 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
300 fixed_regs[HI_REGNO] = 1;
301 fixed_regs[LO_REGNO] = 1;
302 call_used_regs[HI_REGNO] = 1;
303 call_used_regs[LO_REGNO] = 1;
306 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
307 global_regs[i] = 1;
/* Implement TARGET_OPTION_OVERRIDE.  Process deferred -mivc2 options,
   diagnose incompatible option combinations, and derive the tiny-data
   cutoff and per-function machine-status hook.  */
static void
mep_option_override (void)
{
  unsigned int i;
  int j;
  cl_deferred_option *opt;
  vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) mep_deferred_options;

  if (v)
    FOR_EACH_VEC_ELT (*v, i, opt)
      {
	switch (opt->opt_index)
	  {
	  case OPT_mivc2:
	    /* -mivc2: unfix the 32 IVC2 coprocessor registers (hard regs
	       48..79); all become call-used except $cofa0/$cofa1.  */
	    for (j = 0; j < 32; j++)
	      fixed_regs[j + 48] = 0;
	    for (j = 0; j < 32; j++)
	      call_used_regs[j + 48] = 1;
	    for (j = 6; j < 8; j++)
	      call_used_regs[j + 48] = 0;

	    /* Give the IVC2 control and accumulator registers their
	       documented assembler names.  */
#define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
	    RN (0, "$csar0");
	    RN (1, "$cc");
	    RN (4, "$cofr0");
	    RN (5, "$cofr1");
	    RN (6, "$cofa0");
	    RN (7, "$cofa1");
	    RN (15, "$csar1");

	    RN (16, "$acc0_0");
	    RN (17, "$acc0_1");
	    RN (18, "$acc0_2");
	    RN (19, "$acc0_3");
	    RN (20, "$acc0_4");
	    RN (21, "$acc0_5");
	    RN (22, "$acc0_6");
	    RN (23, "$acc0_7");

	    RN (24, "$acc1_0");
	    RN (25, "$acc1_1");
	    RN (26, "$acc1_2");
	    RN (27, "$acc1_3");
	    RN (28, "$acc1_4");
	    RN (29, "$acc1_5");
	    RN (30, "$acc1_6");
	    RN (31, "$acc1_7");
#undef RN
	    break;

	  default:
	    gcc_unreachable ();
	  }
      }

  /* PIC is not supported on this target at all.  */
  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  /* The size-model options are mutually exclusive.  */
  if (TARGET_S && TARGET_M)
    error ("only one of -ms and -mm may be given");
  if (TARGET_S && TARGET_L)
    error ("only one of -ms and -ml may be given");
  if (TARGET_M && TARGET_L)
    error ("only one of -mm and -ml may be given");
  if (TARGET_S && global_options_set.x_mep_tiny_cutoff)
    error ("only one of -ms and -mtiny= may be given");
  if (TARGET_M && global_options_set.x_mep_tiny_cutoff)
    error ("only one of -mm and -mtiny= may be given");
  if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
    warning (0, "-mclip currently has no effect without -mminmax");

  if (mep_const_section)
    {
      if (strcmp (mep_const_section, "tiny") != 0
	  && strcmp (mep_const_section, "near") != 0
	  && strcmp (mep_const_section, "far") != 0)
	error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
    }

  /* -ms puts everything in tiny data; -mm and (by default) -ml put
     nothing there.  */
  if (TARGET_S)
    mep_tiny_cutoff = 65536;
  if (TARGET_M)
    mep_tiny_cutoff = 0;
  if (TARGET_L && ! global_options_set.x_mep_tiny_cutoff)
    mep_tiny_cutoff = 0;

  if (TARGET_64BIT_CR_REGS)
    flag_split_wide_types = 0;

  init_machine_status = mep_init_machine_status;
  mep_init_intrinsics ();
}
404 /* Pattern Support - constraints, predicates, expanders. */
406 /* MEP has very few instructions that can refer to the span of
407 addresses used by symbols, so it's common to check for them. */
409 static bool
410 symbol_p (rtx x)
412 int c = GET_CODE (x);
414 return (c == CONST_INT
415 || c == CONST
416 || c == SYMBOL_REF);
419 static bool
420 symbolref_p (rtx x)
422 int c;
424 if (GET_CODE (x) != MEM)
425 return false;
427 c = GET_CODE (XEXP (x, 0));
428 return (c == CONST_INT
429 || c == CONST
430 || c == SYMBOL_REF);
433 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
435 #define GEN_REG(R, STRICT) \
436 (GR_REGNO_P (R) \
437 || (!STRICT \
438 && ((R) == ARG_POINTER_REGNUM \
439 || (R) >= FIRST_PSEUDO_REGISTER)))
441 static char pattern[12], *patternp;
442 static GTY(()) rtx patternr[12];
443 #define RTX_IS(x) (strcmp (pattern, x) == 0)
/* Append a one-character classification of X (and, recursively, its
   operands) to the global `pattern' buffer; patternr[] records the rtx
   that produced each character.  Buffer overflow is marked by
   overwriting the last character with '?'.  */
static void
encode_pattern_1 (rtx x)
{
  int i;

  /* Leave room for the terminating NUL written by encode_pattern.  */
  if (patternp == pattern + sizeof (pattern) - 2)
    {
      patternp[-1] = '?';
      return;
    }

  patternr[patternp-pattern] = x;

  switch (GET_CODE (x))
    {
    case REG:
      *patternp++ = 'r';
      break;
    case MEM:
      *patternp++ = 'm';
      /* FALLTHRU - a MEM's address is encoded the same way as a
	 CONST's operand.  */
    case CONST:
      encode_pattern_1 (XEXP(x, 0));
      break;
    case PLUS:
      *patternp++ = '+';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case LO_SUM:
      *patternp++ = 'L';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case HIGH:
      *patternp++ = 'H';
      encode_pattern_1 (XEXP(x, 0));
      break;
    case SYMBOL_REF:
      *patternp++ = 's';
      break;
    case LABEL_REF:
      *patternp++ = 'l';
      break;
    case CONST_INT:
    case CONST_DOUBLE:
      *patternp++ = 'i';
      break;
    case UNSPEC:
      /* 'u' followed by the unspec number, then the operands.  */
      *patternp++ = 'u';
      *patternp++ = '0' + XCINT(x, 1, UNSPEC);
      for (i=0; i<XVECLEN (x, 0); i++)
	encode_pattern_1 (XVECEXP (x, 0, i));
      break;
    case USE:
      *patternp++ = 'U';
      break;
    default:
      *patternp++ = '?';
#if 0
      fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
      debug_rtx (x);
      gcc_unreachable ();
#endif
      break;
    }
}
512 static void
513 encode_pattern (rtx x)
515 patternp = pattern;
516 encode_pattern_1 (x);
517 *patternp = 0;
521 mep_section_tag (rtx x)
523 const char *name;
525 while (1)
527 switch (GET_CODE (x))
529 case MEM:
530 case CONST:
531 x = XEXP (x, 0);
532 break;
533 case UNSPEC:
534 x = XVECEXP (x, 0, 0);
535 break;
536 case PLUS:
537 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
538 return 0;
539 x = XEXP (x, 0);
540 break;
541 default:
542 goto done;
545 done:
546 if (GET_CODE (x) != SYMBOL_REF)
547 return 0;
548 name = XSTR (x, 0);
549 if (name[0] == '@' && name[2] == '.')
551 if (name[1] == 'i' || name[1] == 'I')
553 if (name[1] == 'I')
554 return 'f'; /* near */
555 return 'n'; /* far */
557 return name[1];
559 return 0;
/* Return the register class that hard register REGNO belongs to.
   Special-purpose registers get their singleton classes; general
   registers split into TPREL_REGS (the first 8, reachable via $tp
   offsets) and GENERAL_REGS; coprocessor registers are matched against
   user-defined subclasses before falling back to the generic classes.  */
enum reg_class
mep_regno_reg_class (int regno)
{
  switch (regno)
    {
    case SP_REGNO:		return SP_REGS;
    case TP_REGNO:		return TP_REGS;
    case GP_REGNO:		return GP_REGS;
    case 0:			return R0_REGS;
    case HI_REGNO:		return HI_REGS;
    case LO_REGNO:		return LO_REGS;
    case ARG_POINTER_REGNUM:	return GENERAL_REGS;
    }

  if (GR_REGNO_P (regno))
    return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;

  if (CONTROL_REGNO_P (regno))
    return CONTROL_REGS;

  if (CR_REGNO_P (regno))
    {
      int i, j;

      /* Search for the register amongst user-defined subclasses of
	 the coprocessor registers.  */
      for (i = USER0_REGS; i <= USER3_REGS; ++i)
	{
	  if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
	    continue;
	  /* Prefer the user class only if no proper subclass of it
	     also contains REGNO.  */
	  for (j = 0; j < N_REG_CLASSES; ++j)
	    {
	      enum reg_class sub = reg_class_subclasses[i][j];

	      if (sub == LIM_REG_CLASSES)
		return i;
	      if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
		break;
	    }
	}

      return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
    }

  if (CCR_REGNO_P (regno))
    return CCR_REGS;

  /* Everything left must be a shadow register, which is unallocatable.  */
  gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
  return NO_REGS;
}
612 static bool
613 const_in_range (rtx x, int minv, int maxv)
615 return (GET_CODE (x) == CONST_INT
616 && INTVAL (x) >= minv
617 && INTVAL (x) <= maxv);
620 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
621 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
622 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
623 at the end of the insn stream. */
626 mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
628 if (rtx_equal_p (dest, src1))
629 return src2;
630 else if (rtx_equal_p (dest, src2))
631 return src1;
632 else
634 if (insn == 0)
635 emit_insn (gen_movsi (copy_rtx (dest), src1));
636 else
637 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
638 return src2;
642 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
643 Change the last element of PATTERN from (clobber (scratch:SI))
644 to (clobber (reg:SI HI_REGNO)). */
646 static void
647 mep_rewrite_mult (rtx insn, rtx pattern)
649 rtx hi_clobber;
651 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
652 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
653 PATTERN (insn) = pattern;
654 INSN_CODE (insn) = -1;
657 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
658 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
659 store the result in DEST if nonnull. */
661 static void
662 mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
664 rtx lo, pattern;
666 lo = gen_rtx_REG (SImode, LO_REGNO);
667 if (dest)
668 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
669 mep_mulr_source (insn, dest, src1, src2));
670 else
671 pattern = gen_mulsi3_lo (lo, src1, src2);
672 mep_rewrite_mult (insn, pattern);
675 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
676 SRC3 into $lo, then use either madd or maddr. The move into $lo will
677 be deleted by a peephole2 if SRC3 is already in $lo. */
679 static void
680 mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
682 rtx lo, pattern;
684 lo = gen_rtx_REG (SImode, LO_REGNO);
685 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
686 if (dest)
687 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
688 mep_mulr_source (insn, dest, src1, src2),
689 copy_rtx (lo));
690 else
691 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
692 mep_rewrite_mult (insn, pattern);
695 /* Return true if $lo has the same value as integer register GPR when
696 instruction INSN is reached. If necessary, rewrite the instruction
697 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
698 rtx for (reg:SI LO_REGNO).
700 This function is intended to be used by the peephole2 pass. Since
701 that pass goes from the end of a basic block to the beginning, and
702 propagates liveness information on the way, there is no need to
703 update register notes here.
705 If GPR_DEAD_P is true on entry, and this function returns true,
706 then the caller will replace _every_ use of GPR in and after INSN
707 with LO. This means that if the instruction that sets $lo is a
708 mulr- or maddr-type instruction, we can rewrite it to use mul or
709 madd instead. In combination with the copy progagation pass,
710 this allows us to replace sequences like:
712 mov GPR,R1
713 mulr GPR,R2
715 with:
717 mul R1,R2
719 if GPR is no longer used. */
/* Worker for mep_reuse_lo_p (see the block comment above).  Walk
   backwards from INSN to the start of the basic block looking for the
   multiply/madd instruction that last set $lo, rewriting it via
   mep_rewrite_mulsi3/mep_rewrite_maddsi3 when GPR can be reused.
   NOTE: clobbers recog_data; callers go through mep_reuse_lo_p.  */
static bool
mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
{
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	switch (recog_memoized (insn))
	  {
	  case CODE_FOR_mulsi3_1:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		/* If GPR dies here, the rewritten insn need not keep a
		   GPR destination at all.  */
		mep_rewrite_mulsi3 (insn,
				    gpr_dead_p ? NULL : recog_data.operand[0],
				    recog_data.operand[1],
				    recog_data.operand[2]);
		return true;
	      }
	    return false;

	  case CODE_FOR_maddsi3:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		mep_rewrite_maddsi3 (insn,
				     gpr_dead_p ? NULL : recog_data.operand[0],
				     recog_data.operand[1],
				     recog_data.operand[2],
				     recog_data.operand[3]);
		return true;
	      }
	    return false;

	  case CODE_FOR_mulsi3r:
	  case CODE_FOR_maddsi3r:
	    /* Already writes $lo; just check it targets GPR too.  */
	    extract_insn (insn);
	    return rtx_equal_p (recog_data.operand[1], gpr);

	  default:
	    /* Any intervening write to $lo or GPR, or a volatile insn,
	       invalidates the reuse.  */
	    if (reg_set_p (lo, insn)
		|| reg_set_p (gpr, insn)
		|| volatile_insn_p (PATTERN (insn)))
	      return false;

	    /* A use of GPR between INSN and the multiply means GPR is
	       not dead after all.  */
	    if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
	      gpr_dead_p = false;
	    break;
	  }
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
775 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
777 bool
778 mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
780 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
781 extract_insn (insn);
782 return result;
785 /* Return true if SET can be turned into a post-modify load or store
786 that adds OFFSET to GPR. In other words, return true if SET can be
787 changed into:
789 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
791 It's OK to change SET to an equivalent operation in order to
792 make it match. */
/* See the block comment above: decide whether SET can absorb a
   post-modify of GPR by OFFSET, rewriting *MEM/*REG in place when it
   can.  Note this mutates SET on success, so it must only be called
   when the caller intends to commit to the transformation.  */
static bool
mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
{
  rtx *reg, *mem;
  unsigned int reg_bytes, mem_bytes;
  enum machine_mode reg_mode, mem_mode;

  /* Only simple SETs can be converted.  */
  if (GET_CODE (set) != SET)
    return false;

  /* Point REG to what we hope will be the register side of the set and
     MEM to what we hope will be the memory side.  */
  if (GET_CODE (SET_DEST (set)) == MEM)
    {
      mem = &SET_DEST (set);
      reg = &SET_SRC (set);
    }
  else
    {
      reg = &SET_DEST (set);
      mem = &SET_SRC (set);
      /* Look through an extending load.  */
      if (GET_CODE (*mem) == SIGN_EXTEND)
	mem = &XEXP (*mem, 0);
    }

  /* Check that *REG is a suitable coprocessor register.  */
  if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
    return false;

  /* Check that *MEM is a suitable memory reference: a plain (mem GPR).  */
  if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
    return false;

  /* Get the number of bytes in each operand.  */
  mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
  reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));

  /* Check that OFFSET is suitably aligned for the access size.  */
  if (INTVAL (offset) & (mem_bytes - 1))
    return false;

  /* Convert *MEM to a normal integer mode.  */
  mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
  *mem = change_address (*mem, mem_mode, NULL);

  /* Adjust *REG as well.  */
  *reg = shallow_copy_rtx (*reg);
  if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
    {
      /* SET is a subword load.  Convert it to an explicit extension.  */
      PUT_MODE (*reg, SImode);
      *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
    }
  else
    {
      reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
      PUT_MODE (*reg, reg_mode);
    }
  return true;
}
856 /* Return the effect of frame-related instruction INSN. */
858 static rtx
859 mep_frame_expr (rtx insn)
861 rtx note, expr;
863 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
864 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
865 RTX_FRAME_RELATED_P (expr) = 1;
866 return expr;
869 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
870 new pattern in INSN1; INSN2 will be deleted by the caller. */
872 static void
873 mep_make_parallel (rtx insn1, rtx insn2)
875 rtx expr;
877 if (RTX_FRAME_RELATED_P (insn2))
879 expr = mep_frame_expr (insn2);
880 if (RTX_FRAME_RELATED_P (insn1))
881 expr = gen_rtx_SEQUENCE (VOIDmode,
882 gen_rtvec (2, mep_frame_expr (insn1), expr));
883 set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
884 RTX_FRAME_RELATED_P (insn1) = 1;
887 PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
888 gen_rtvec (2, PATTERN (insn1),
889 PATTERN (insn2)));
890 INSN_CODE (insn1) = -1;
893 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
894 the basic block to see if any previous load or store instruction can
895 be persuaded to do SET_INSN as a side-effect. Return true if so. */
/* SET_INSN is an instruction that adds OFFSET to REG.  Go back through
   the basic block to see if any previous load or store instruction can
   be persuaded to do SET_INSN as a side-effect.  Return true if so.
   NOTE: may clobber recog_data; callers go through mep_use_post_modify_p.  */
static bool
mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
{
  rtx insn;

  insn = set_insn;
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	{
	  if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
	    {
	      /* Fold SET_INSN into the matched load/store.  */
	      mep_make_parallel (insn, set_insn);
	      return true;
	    }

	  /* Stop at any insn that sets or uses REG, or is volatile;
	     moving the post-modify past it would change semantics.  */
	  if (reg_set_p (reg, insn)
	      || reg_referenced_p (reg, PATTERN (insn))
	      || volatile_insn_p (PATTERN (insn)))
	    return false;
	}
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
924 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
926 bool
927 mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
929 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
930 extract_insn (insn);
931 return result;
934 bool
935 mep_allow_clip (rtx ux, rtx lx, int s)
937 HOST_WIDE_INT u = INTVAL (ux);
938 HOST_WIDE_INT l = INTVAL (lx);
939 int i;
941 if (!TARGET_OPT_CLIP)
942 return false;
944 if (s)
946 for (i = 0; i < 30; i ++)
947 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
948 && (l == - ((HOST_WIDE_INT) 1 << i)))
949 return true;
951 else
953 if (l != 0)
954 return false;
956 for (i = 0; i < 30; i ++)
957 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
958 return true;
960 return false;
963 bool
964 mep_bit_position_p (rtx x, bool looking_for)
966 if (GET_CODE (x) != CONST_INT)
967 return false;
968 switch ((int) INTVAL(x) & 0xff)
970 case 0x01: case 0x02: case 0x04: case 0x08:
971 case 0x10: case 0x20: case 0x40: case 0x80:
972 return looking_for;
973 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
974 case 0xef: case 0xdf: case 0xbf: case 0x7f:
975 return !looking_for;
977 return false;
980 static bool
981 move_needs_splitting (rtx dest, rtx src,
982 enum machine_mode mode ATTRIBUTE_UNUSED)
984 int s = mep_section_tag (src);
986 while (1)
988 if (GET_CODE (src) == CONST
989 || GET_CODE (src) == MEM)
990 src = XEXP (src, 0);
991 else if (GET_CODE (src) == SYMBOL_REF
992 || GET_CODE (src) == LABEL_REF
993 || GET_CODE (src) == PLUS)
994 break;
995 else
996 return false;
998 if (s == 'f'
999 || (GET_CODE (src) == PLUS
1000 && GET_CODE (XEXP (src, 1)) == CONST_INT
1001 && (INTVAL (XEXP (src, 1)) < -65536
1002 || INTVAL (XEXP (src, 1)) > 0xffffff))
1003 || (GET_CODE (dest) == REG
1004 && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
1005 return true;
1006 return false;
1009 bool
1010 mep_split_mov (rtx *operands, int symbolic)
1012 if (symbolic)
1014 if (move_needs_splitting (operands[0], operands[1], SImode))
1015 return true;
1016 return false;
1019 if (GET_CODE (operands[1]) != CONST_INT)
1020 return false;
1022 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1023 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1024 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1025 return false;
1027 if (((!reload_completed && !reload_in_progress)
1028 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1029 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1030 return false;
1032 return true;
1035 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1036 it to one specific value. So the insn chosen depends on whether
1037 the source and destination modes match. */
1039 bool
1040 mep_vliw_mode_match (rtx tgt)
1042 bool src_vliw = mep_vliw_function_p (cfun->decl);
1043 bool tgt_vliw = INTVAL (tgt);
1045 return src_vliw == tgt_vliw;
1048 /* Like the above, but also test for near/far mismatches. */
1050 bool
1051 mep_vliw_jmp_match (rtx tgt)
1053 bool src_vliw = mep_vliw_function_p (cfun->decl);
1054 bool tgt_vliw = INTVAL (tgt);
1056 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1057 return false;
1059 return src_vliw == tgt_vliw;
1062 bool
1063 mep_multi_slot (rtx x)
1065 return get_attr_slot (x) == SLOT_MULTI;
1068 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1070 static bool
1071 mep_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1073 /* We can't convert symbol values to gp- or tp-rel values after
1074 reload, as reload might have used $gp or $tp for other
1075 purposes. */
1076 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1078 char e = mep_section_tag (x);
1079 return (e != 't' && e != 'b');
1081 return 1;
1084 /* Be careful not to use macros that need to be compiled one way for
1085 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
/* Return true if X is a legitimate address for MODE under STRICT
   checking.  Accepted forms, tried in order: %lo(sym)[reg] (only for
   accesses of at most 4 bytes), [reg], [reg+const16], [reg+unspec],
   a bare symbol for calls, and word-aligned small constants for
   SImode/SFmode.  Far-section symbols are never legitimate.  */
bool
mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
{
  int the_tag;

#define DEBUG_LEGIT 0
#if DEBUG_LEGIT
  fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
  debug_rtx (x);
#endif

  if (GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && CONSTANT_P (XEXP (x, 1)))
    {
      if (GET_MODE_SIZE (mode) > 4)
	{
	  /* We will end up splitting this, and lo_sums are not
	     offsettable for us.  */
#if DEBUG_LEGIT
	  fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
#endif
	  return false;
	}
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
#endif
      return true;
    }

  if (GET_CODE (x) == REG
      && GEN_REG (REGNO (x), strict))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg]\n");
#endif
      return true;
    }

  /* Base register plus a 16-bit signed displacement.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && const_in_range (XEXP (x, 1), -32768, 32767))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+const]\n");
#endif
      return true;
    }

  /* Base register plus an unspec (possibly with a constant offset),
     as produced for gp-/tp-relative references.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && GET_CODE (XEXP (x, 1)) == CONST
      && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
	  || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+unspec]\n");
#endif
      return true;
    }

  the_tag = mep_section_tag (x);

  if (the_tag == 'f')
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - nope, [far]\n");
#endif
      return false;
    }

  /* VOIDmode is used for call addresses.  */
  if (mode == VOIDmode
      && GET_CODE (x) == SYMBOL_REF)
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, call [symbol]\n");
#endif
      return true;
    }

  if ((mode == SImode || mode == SFmode)
      && CONSTANT_P (x)
      && mep_legitimate_constant_p (mode, x)
      && the_tag != 't' && the_tag != 'b')
    {
      /* Integer constants must be small, non-negative and word-aligned
	 to be used as absolute addresses.  */
      if (GET_CODE (x) != CONST_INT
	  || (INTVAL (x) <= 0xfffff
	      && INTVAL (x) >= 0
	      && (INTVAL (x) % 4) == 0))
	{
#if DEBUG_LEGIT
	  fprintf (stderr, " - yup, [const]\n");
#endif
	  return true;
	}
    }

#if DEBUG_LEGIT
  fprintf (stderr, " - nope.\n");
#endif
  return false;
}
/* LEGITIMIZE_RELOAD_ADDRESS worker.  Rewrite *X into a form reload can
   handle, pushing any needed reloads itself.  Returns 1 if a reload was
   pushed (reload must not process *X further), 0 to let the generic
   reload machinery handle *X.  TYPE_I is an enum reload_type passed as
   int by the macro interface.  */
1196 mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
1197 int type_i,
1198 int ind_levels ATTRIBUTE_UNUSED)
1200 enum reload_type type = (enum reload_type) type_i;
/* Case 1: (plus (mem ...) (reg)) — reload the whole sum into a
   register rather than letting GCC reload only the inner MEM.  */
1202 if (GET_CODE (*x) == PLUS
1203 && GET_CODE (XEXP (*x, 0)) == MEM
1204 && GET_CODE (XEXP (*x, 1)) == REG)
1206 /* GCC will by default copy the MEM into a REG, which results in
1207 an invalid address. For us, the best thing to do is move the
1208 whole expression to a REG. */
1209 push_reload (*x, NULL_RTX, x, NULL,
1210 GENERAL_REGS, mode, VOIDmode,
1211 0, 0, opnum, type);
1212 return 1;
/* Case 2: (plus (symbol_ref ...) (const_int)) — valid only for
   tiny ('t') and based ('b') sections; otherwise reload the symbol
   part into a register so the address becomes reg+int.  */
1215 if (GET_CODE (*x) == PLUS
1216 && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
1217 && GET_CODE (XEXP (*x, 1)) == CONST_INT)
1219 char e = mep_section_tag (XEXP (*x, 0));
1221 if (e != 't' && e != 'b')
1223 /* GCC thinks that (sym+const) is a valid address. Well,
1224 sometimes it is, this time it isn't. The best thing to
1225 do is reload the symbol to a register, since reg+int
1226 tends to work, and we can't just add the symbol and
1227 constant anyway. */
1228 push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
1229 GENERAL_REGS, mode, VOIDmode,
1230 0, 0, opnum, type);
1231 return 1;
1234 return 0;
/* Return the encoded length in bytes (2 or 4) of the core load/store
   INSN whose memory operand is operand OPN of its single_set.  Short
   (16-bit) encodings exist for plain register addresses, certain
   SP-relative accesses, and certain TP-relative accesses.  */
1238 mep_core_address_length (rtx insn, int opn)
1240 rtx set = single_set (insn);
1241 rtx mem = XEXP (set, opn);
/* OTHER is the non-memory side of the set (the register moved).  */
1242 rtx other = XEXP (set, 1-opn);
1243 rtx addr = XEXP (mem, 0);
1245 if (register_operand (addr, Pmode))
1246 return 2;
1247 if (GET_CODE (addr) == PLUS)
1249 rtx addend = XEXP (addr, 1);
1251 gcc_assert (REG_P (XEXP (addr, 0)));
1253 switch (REGNO (XEXP (addr, 0)))
1255 case STACK_POINTER_REGNUM:
/* sw/lw $rn,disp($sp): short form needs a word access with a
   7-bit, 4-aligned displacement.  */
1256 if (GET_MODE_SIZE (GET_MODE (mem)) == 4
1257 && mep_imm7a4_operand (addend, VOIDmode))
1258 return 2;
1259 break;
1261 case 13: /* TP */
1262 gcc_assert (REG_P (other));
/* TP-relative short forms only reach registers $0..$7.  */
1264 if (REGNO (other) >= 8)
1265 break;
1266 if (GET_CODE (addend) == CONST
1267 && GET_CODE (XEXP (addend, 0)) == UNSPEC
1268 && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
1270 return 2;
/* Small, naturally-aligned TP offsets also fit the short form.  */
1272 if (GET_CODE (addend) == CONST_INT
1273 && INTVAL (addend) >= 0
1274 && INTVAL (addend) <= 127
1275 && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
1276 return 2;
1277 break;
1281 return 4;
1285 mep_cop_address_length (rtx insn, int opn)
1287 rtx set = single_set (insn);
1288 rtx mem = XEXP (set, opn);
1289 rtx addr = XEXP (mem, 0);
1291 if (GET_CODE (mem) != MEM)
1292 return 2;
1293 if (register_operand (addr, Pmode))
1294 return 2;
1295 if (GET_CODE (addr) == POST_INC)
1296 return 2;
1298 return 4;
1301 #define DEBUG_EXPAND_MOV 0
/* Expand a move of MODE between operands[0] and operands[1].  Returns
   true if the move was fully emitted here, false to let the standard
   movMM expander finish the job.  Handles tiny ('t'), based ('b') and
   far ('f') section addressing, and control-register restrictions.  */
1302 bool
1303 mep_expand_mov (rtx *operands, enum machine_mode mode)
1305 int i, t;
1306 int tag[2];
1307 rtx tpsym, tpoffs;
/* Nonzero when $tp/$gp entry values are unavailable (reload clobbered
   them or never set them up), so tp/gp-relative forms can't be used.  */
1308 int post_reload = 0;
1310 tag[0] = mep_section_tag (operands[0]);
1311 tag[1] = mep_section_tag (operands[1]);
/* Before reload, force mem-to-mem (or other non-reg) moves through a
   register.  */
1313 if (!reload_in_progress
1314 && !reload_completed
1315 && GET_CODE (operands[0]) != REG
1316 && GET_CODE (operands[0]) != SUBREG
1317 && GET_CODE (operands[1]) != REG
1318 && GET_CODE (operands[1]) != SUBREG)
1319 operands[1] = copy_to_mode_reg (mode, operands[1]);
1321 #if DEBUG_EXPAND_MOV
1322 fprintf(stderr, "expand move %s %d\n", mode_name[mode],
1323 reload_in_progress || reload_completed);
1324 debug_rtx (operands[0]);
1325 debug_rtx (operands[1]);
1326 #endif
/* Doubleword moves are handled by the generic expander / splitter.  */
1328 if (mode == DImode || mode == DFmode)
1329 return false;
1330 if (reload_in_progress || reload_completed)
1333 rtx r;
1335 if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
1336 cfun->machine->reload_changes_tp = true;
/* tiny sections need the $gp entry value; based sections need $tp.  */
1338 if (tag[0] == 't' || tag[1] == 't')
1340 r = has_hard_reg_initial_val (Pmode, GP_REGNO);
1341 if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
1342 post_reload = 1;
1344 if (tag[0] == 'b' || tag[1] == 'b')
1346 r = has_hard_reg_initial_val (Pmode, TP_REGNO);
1347 if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
1348 post_reload = 1;
1350 if (cfun->machine->reload_changes_tp == true)
1351 post_reload = 1;
1354 if (!post_reload)
1356 rtx n;
/* Loading the address of a tp/gp-relative symbol: rewrite it as
   base-register + UNSPEC(symbol) [+ offset].  */
1357 if (symbol_p (operands[1]))
1359 t = mep_section_tag (operands[1]);
1360 if (t == 'b' || t == 't')
1363 if (GET_CODE (operands[1]) == SYMBOL_REF)
1365 tpsym = operands[1];
1366 n = gen_rtx_UNSPEC (mode,
1367 gen_rtvec (1, operands[1]),
1368 t == 'b' ? UNS_TPREL : UNS_GPREL);
1369 n = gen_rtx_CONST (mode, n);
1371 else if (GET_CODE (operands[1]) == CONST
1372 && GET_CODE (XEXP (operands[1], 0)) == PLUS
1373 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
1374 && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
1376 tpsym = XEXP (XEXP (operands[1], 0), 0);
1377 tpoffs = XEXP (XEXP (operands[1], 0), 1);
1378 n = gen_rtx_UNSPEC (mode,
1379 gen_rtvec (1, tpsym),
1380 t == 'b' ? UNS_TPREL : UNS_GPREL);
1381 n = gen_rtx_PLUS (mode, n, tpoffs);
1382 n = gen_rtx_CONST (mode, n);
1384 else if (GET_CODE (operands[1]) == CONST
1385 && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
1386 return false;
1387 else
1389 error ("unusual TP-relative address");
1390 return false;
1393 n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
1394 : mep_gp_rtx ()), n);
1395 n = emit_insn (gen_rtx_SET (mode, operands[0], n));
1396 #if DEBUG_EXPAND_MOV
1397 fprintf(stderr, "mep_expand_mov emitting ");
1398 debug_rtx(n);
1399 #endif
1400 return true;
/* Rewrite MEMs in tiny/based sections to base-register-relative
   addresses in place.  */
1404 for (i=0; i < 2; i++)
1406 t = mep_section_tag (operands[i]);
1407 if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
1409 rtx sym, n, r;
1410 int u;
1412 sym = XEXP (operands[i], 0);
1413 if (GET_CODE (sym) == CONST
1414 && GET_CODE (XEXP (sym, 0)) == UNSPEC)
1415 sym = XVECEXP (XEXP (sym, 0), 0, 0);
1417 if (t == 'b')
1419 r = mep_tp_rtx ();
1420 u = UNS_TPREL;
1422 else
1424 r = mep_gp_rtx ();
1425 u = UNS_GPREL;
1428 n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
1429 n = gen_rtx_CONST (Pmode, n);
1430 n = gen_rtx_PLUS (Pmode, r, n);
1431 operands[i] = replace_equiv_address (operands[i], n);
/* Control registers can only be moved to/from general registers;
   go through a temporary when the other side is memory.  */
1436 if ((GET_CODE (operands[1]) != REG
1437 && MEP_CONTROL_REG (operands[0]))
1438 || (GET_CODE (operands[0]) != REG
1439 && MEP_CONTROL_REG (operands[1])))
1441 rtx temp;
1442 #if DEBUG_EXPAND_MOV
1443 fprintf (stderr, "cr-mem, forcing op1 to reg\n");
1444 #endif
1445 temp = gen_reg_rtx (mode);
1446 emit_move_insn (temp, operands[1]);
1447 operands[1] = temp;
/* Storing to a far symbol, or a non-word store to any symbolic
   address: force the address into a register first.  */
1450 if (symbolref_p (operands[0])
1451 && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
1452 || (GET_MODE_SIZE (mode) != 4)))
1454 rtx temp;
1456 gcc_assert (!reload_in_progress && !reload_completed);
1458 temp = force_reg (Pmode, XEXP (operands[0], 0));
1459 operands[0] = replace_equiv_address (operands[0], temp);
1460 emit_move_insn (operands[0], operands[1]);
1461 return true;
/* During/after reload with bases unavailable, treat tiny/based
   symbols like ordinary symbols (movh/add pair below).  */
1464 if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
1465 tag[1] = 0;
1467 if (symbol_p (operands[1])
1468 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1470 emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
1471 emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
1472 return true;
1475 if (symbolref_p (operands[1])
1476 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1478 rtx temp;
/* During reload no new pseudos may be created; reuse the
   destination as the address temporary.  */
1480 if (reload_in_progress || reload_completed)
1481 temp = operands[0];
1482 else
1483 temp = gen_reg_rtx (Pmode);
1485 emit_insn (gen_movsi_topsym_s (temp, operands[1]));
1486 emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
1487 emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
1488 return true;
1491 return false;
1494 /* Cases where the pattern can't be made to use at all.  Predicate for
   the mov patterns' condition: returns true only when this operand
   pair is acceptable to the insn patterns as-is. */
1496 bool
1497 mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1499 int i;
1501 #define DEBUG_MOV_OK 0
1502 #if DEBUG_MOV_OK
1503 fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
1504 mep_section_tag (operands[1]));
1505 debug_rtx (operands[0]);
1506 debug_rtx (operands[1]);
1507 #endif
1509 /* We want the movh patterns to get these. */
1510 if (GET_CODE (operands[1]) == HIGH)
1511 return false;
1513 /* We can't store a register to a far variable without using a
1514 scratch register to hold the address. Using far variables should
1515 be split by mep_emit_mov anyway. */
1516 if (mep_section_tag (operands[0]) == 'f'
1517 || mep_section_tag (operands[1]) == 'f')
1519 #if DEBUG_MOV_OK
1520 fprintf (stderr, " - no, f\n");
1521 #endif
1522 return false;
/* NOTE: I is reused here first as a section tag, then below as a
   loop index.  */
1524 i = mep_section_tag (operands[1]);
1525 if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
1526 /* These are supposed to be generated with adds of the appropriate
1527 register. During and after reload, however, we allow them to
1528 be accessed as normal symbols because adding a dependency on
1529 the base register now might cause problems. */
1531 #if DEBUG_MOV_OK
1532 fprintf (stderr, " - no, bt\n");
1533 #endif
1534 return false;
1537 /* The only moves we can allow involve at least one general
1538 register, so require it. */
1539 for (i = 0; i < 2; i ++)
1541 /* Allow subregs too, before reload. */
1542 rtx x = operands[i];
1544 if (GET_CODE (x) == SUBREG)
1545 x = XEXP (x, 0);
1546 if (GET_CODE (x) == REG
1547 && ! MEP_CONTROL_REG (x))
1549 #if DEBUG_MOV_OK
1550 fprintf (stderr, " - ok\n");
1551 #endif
1552 return true;
1555 #if DEBUG_MOV_OK
1556 fprintf (stderr, " - no, no gen reg\n");
1557 #endif
1558 return false;
1561 #define DEBUG_SPLIT_WIDE_MOVE 0
/* Split a doubleword move: given operands[0]/operands[1] (dest/src of
   a DImode/DFmode move), fill in operands[2..5] with the four SImode
   halves — operands[2]=dest-hi, operands[3]=src-hi, operands[4]=dest-lo,
   operands[5]=src-lo — swapping the pairs when needed to avoid
   clobbering a half that is still to be read.  */
1562 void
1563 mep_split_wide_move (rtx *operands, enum machine_mode mode)
1565 int i;
1567 #if DEBUG_SPLIT_WIDE_MOVE
1568 fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
1569 debug_rtx (operands[0]);
1570 debug_rtx (operands[1]);
1571 #endif
1573 for (i = 0; i <= 1; i++)
1575 rtx op = operands[i], hi, lo;
1577 switch (GET_CODE (op))
1579 case REG:
1581 unsigned int regno = REGNO (op);
/* A 64-bit coprocessor register holds both halves itself: the
   low half is the SImode view, the high half a zero_extract of
   the upper 32 bits.  */
1583 if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
1585 rtx i32;
1587 lo = gen_rtx_REG (SImode, regno);
1588 i32 = GEN_INT (32);
1589 hi = gen_rtx_ZERO_EXTRACT (SImode,
1590 gen_rtx_REG (DImode, regno),
1591 i32, i32);
1593 else
/* Register pair: which of regno/regno+1 is the high word
   depends on endianness (TARGET_*_ENDIAN are 0/1).  */
1595 hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
1596 lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
1599 break;
1601 case CONST_INT:
1602 case CONST_DOUBLE:
1603 case MEM:
1604 hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
1605 lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
1606 break;
1608 default:
1609 gcc_unreachable ();
1612 /* The high part of CR <- GPR moves must be done after the low part. */
1613 operands [i + 4] = lo;
1614 operands [i + 2] = hi;
/* If the first emitted half would clobber a register the second
   still reads (or a zero_extract half is involved), emit the low
   pair first instead.  */
1617 if (reg_mentioned_p (operands[2], operands[5])
1618 || GET_CODE (operands[2]) == ZERO_EXTRACT
1619 || GET_CODE (operands[4]) == ZERO_EXTRACT)
1621 rtx tmp;
1623 /* Overlapping register pairs -- make sure we don't
1624 early-clobber ourselves. */
1625 tmp = operands[2];
1626 operands[2] = operands[4];
1627 operands[4] = tmp;
1628 tmp = operands[3];
1629 operands[3] = operands[5];
1630 operands[5] = tmp;
1633 #if DEBUG_SPLIT_WIDE_MOVE
1634 fprintf(stderr, "\033[34m");
1635 debug_rtx (operands[2]);
1636 debug_rtx (operands[3]);
1637 debug_rtx (operands[4]);
1638 debug_rtx (operands[5]);
1639 fprintf(stderr, "\033[0m");
1640 #endif
1643 /* Emit a setcc instruction in its entirity. */
1645 static bool
1646 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1648 rtx tmp;
1650 switch (code)
1652 case GT:
1653 case GTU:
1654 tmp = op1, op1 = op2, op2 = tmp;
1655 code = swap_condition (code);
1656 /* FALLTHRU */
1658 case LT:
1659 case LTU:
1660 op1 = force_reg (SImode, op1);
1661 emit_insn (gen_rtx_SET (VOIDmode, dest,
1662 gen_rtx_fmt_ee (code, SImode, op1, op2)));
1663 return true;
1665 case EQ:
1666 if (op2 != const0_rtx)
1667 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1668 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1669 return true;
1671 case NE:
1672 /* Branchful sequence:
1673 mov dest, 0 16-bit
1674 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1675 mov dest, 1 16-bit
1677 Branchless sequence:
1678 add3 tmp, op1, -op2 32-bit (or mov + sub)
1679 sltu3 tmp, tmp, 1 16-bit
1680 xor3 dest, tmp, 1 32-bit
1682 if (optimize_size && op2 != const0_rtx)
1683 return false;
1685 if (op2 != const0_rtx)
1686 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1688 op2 = gen_reg_rtx (SImode);
1689 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1691 emit_insn (gen_rtx_SET (VOIDmode, dest,
1692 gen_rtx_XOR (SImode, op2, const1_rtx)));
1693 return true;
1695 case LE:
1696 if (GET_CODE (op2) != CONST_INT
1697 || INTVAL (op2) == 0x7ffffff)
1698 return false;
1699 op2 = GEN_INT (INTVAL (op2) + 1);
1700 return mep_expand_setcc_1 (LT, dest, op1, op2);
1702 case LEU:
1703 if (GET_CODE (op2) != CONST_INT
1704 || INTVAL (op2) == -1)
1705 return false;
1706 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1707 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1709 case GE:
1710 if (GET_CODE (op2) != CONST_INT
1711 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1712 return false;
1713 op2 = GEN_INT (INTVAL (op2) - 1);
1714 return mep_expand_setcc_1 (GT, dest, op1, op2);
1716 case GEU:
1717 if (GET_CODE (op2) != CONST_INT
1718 || op2 == const0_rtx)
1719 return false;
1720 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1721 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1723 default:
1724 gcc_unreachable ();
1728 bool
1729 mep_expand_setcc (rtx *operands)
1731 rtx dest = operands[0];
1732 enum rtx_code code = GET_CODE (operands[1]);
1733 rtx op0 = operands[2];
1734 rtx op1 = operands[3];
1736 return mep_expand_setcc_1 (code, dest, op0, op1);
/* Expand a conditional branch comparison: operands[0] is the
   comparison rtx, operands[1]/operands[2] its operands.  Returns a new
   comparison rtx (code, op0, op1) that the branch patterns can match,
   materializing a setcc into a temporary where the hardware lacks the
   comparison.
   NOTE(review): several cases below wrap mep_expand_setcc_1 calls —
   which emit insns — inside gcc_assert; if GCC is ever built with
   assertion checking disabled these side effects would be dropped.
   Confirm gcc_assert's definition before relying on that.  */
1740 mep_expand_cbranch (rtx *operands)
1742 enum rtx_code code = GET_CODE (operands[0]);
1743 rtx op0 = operands[1];
1744 rtx op1 = operands[2];
1745 rtx tmp;
1747 restart:
1748 switch (code)
1750 case LT:
/* slt-style compare with small immediate is directly branchable.  */
1751 if (mep_imm4_operand (op1, SImode))
1752 break;
1754 tmp = gen_reg_rtx (SImode);
1755 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1756 code = NE;
1757 op0 = tmp;
1758 op1 = const0_rtx;
1759 break;
1761 case GE:
1762 if (mep_imm4_operand (op1, SImode))
1763 break;
1765 tmp = gen_reg_rtx (SImode);
1766 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
/* (op0 >= op1) == !(op0 < op1): branch on the setcc being zero.  */
1768 code = EQ;
1769 op0 = tmp;
1770 op1 = const0_rtx;
1771 break;
1773 case EQ:
1774 case NE:
1775 if (! mep_reg_or_imm4_operand (op1, SImode))
1776 op1 = force_reg (SImode, op1);
1777 break;
1779 case LE:
1780 case GT:
/* Convert to LT/GE by bumping the constant, then retry.  */
1781 if (GET_CODE (op1) == CONST_INT
1782 && INTVAL (op1) != 0x7fffffff)
1784 op1 = GEN_INT (INTVAL (op1) + 1);
1785 code = (code == LE ? LT : GE);
1786 goto restart;
/* Otherwise compute op1 < op0 and branch on the result.  */
1789 tmp = gen_reg_rtx (SImode);
1790 gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));
1792 code = (code == LE ? EQ : NE);
1793 op0 = tmp;
1794 op1 = const0_rtx;
1795 break;
1797 case LTU:
/* (op0 <u 1) iff (op0 == 0).  */
1798 if (op1 == const1_rtx)
1800 code = EQ;
1801 op1 = const0_rtx;
1802 break;
1805 tmp = gen_reg_rtx (SImode);
1806 gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
1807 code = NE;
1808 op0 = tmp;
1809 op1 = const0_rtx;
1810 break;
1812 case LEU:
1813 tmp = gen_reg_rtx (SImode);
1814 if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
1815 code = NE;
1816 else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
1817 code = EQ;
1818 else
1819 gcc_unreachable ();
1820 op0 = tmp;
1821 op1 = const0_rtx;
1822 break;
1824 case GTU:
1825 tmp = gen_reg_rtx (SImode);
1826 gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
1827 || mep_expand_setcc_1 (LTU, tmp, op1, op0));
1828 code = NE;
1829 op0 = tmp;
1830 op1 = const0_rtx;
1831 break;
1833 case GEU:
1834 tmp = gen_reg_rtx (SImode);
1835 if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
1836 code = NE;
1837 else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
1838 code = EQ;
1839 else
1840 gcc_unreachable ();
1841 op0 = tmp;
1842 op1 = const0_rtx;
1843 break;
1845 default:
1846 gcc_unreachable ();
1849 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
1852 const char *
1853 mep_emit_cbranch (rtx *operands, int ne)
1855 if (GET_CODE (operands[1]) == REG)
1856 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1857 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
1858 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1859 else
1860 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
/* Expand a call pattern.  RETURNS_VALUE is 1 for call_value (so the
   address MEM is operands[1] and operands[0] is the result), 0 for a
   plain call (address in operands[0]).  The call insns carry uses of
   $tp and $gp so their entry values stay live across calls.  */
1863 void
1864 mep_expand_call (rtx *operands, int returns_value)
1866 rtx addr = operands[returns_value];
1867 rtx tp = mep_tp_rtx ();
1868 rtx gp = mep_gp_rtx ();
1870 gcc_assert (GET_CODE (addr) == MEM);
1872 addr = XEXP (addr, 0);
1874 if (! mep_call_address_operand (addr, VOIDmode))
1875 addr = force_reg (SImode, addr);
/* The third operand (next-arg info) may be absent; default it.  */
1877 if (! operands[returns_value+2])
1878 operands[returns_value+2] = const0_rtx;
1880 if (returns_value)
1881 emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
1882 operands[3], tp, gp));
1883 else
1884 emit_call_insn (gen_call_internal (addr, operands[1],
1885 operands[2], tp, gp));
1888 /* Aliasing Support. */
1890 /* If X is a machine specific address (i.e. a symbol or label being
1891 referenced as a displacement from the GOT implemented using an
1892 UNSPEC), then return the base term. Otherwise return X. */
/* Concretely: recognize ($tp + const(unspec TPREL sym)) and
   ($gp + const(unspec GPREL sym)) and return the bare symbol so the
   alias machinery can see through the relocation.  */
1895 mep_find_base_term (rtx x)
1897 rtx base, term;
1898 int unspec;
1900 if (GET_CODE (x) != PLUS)
1901 return x;
1902 base = XEXP (x, 0);
1903 term = XEXP (x, 1);
/* The base must be the (pseudo holding the) entry value of $tp or
   $gp; note the pointer comparison relies on those rtxes being
   shared.  */
1905 if (has_hard_reg_initial_val(Pmode, TP_REGNO)
1906 && base == mep_tp_rtx ())
1907 unspec = UNS_TPREL;
1908 else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
1909 && base == mep_gp_rtx ())
1910 unspec = UNS_GPREL;
1911 else
1912 return x;
1914 if (GET_CODE (term) != CONST)
1915 return x;
1916 term = XEXP (term, 0);
1918 if (GET_CODE (term) != UNSPEC
1919 || XINT (term, 1) != unspec)
1920 return x;
/* Return the wrapped SYMBOL_REF/LABEL_REF.  */
1922 return XVECEXP (term, 0, 0);
1925 /* Reload Support. */
1927 /* Return true if the registers in CLASS cannot represent the change from
1928 modes FROM to TO. */
1930 bool
1931 mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
1932 enum reg_class regclass)
1934 if (from == to)
1935 return false;
1937 /* 64-bit COP regs must remain 64-bit COP regs. */
/* Reject any change where either side is narrower than 8 bytes, so a
   64-bit coprocessor register is never viewed in a smaller mode.  */
1938 if (TARGET_64BIT_CR_REGS
1939 && (regclass == CR_REGS
1940 || regclass == LOADABLE_CR_REGS)
1941 && (GET_MODE_SIZE (to) < 8
1942 || GET_MODE_SIZE (from) < 8))
1943 return true;
1945 return false;
1948 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
1950 static bool
1951 mep_general_reg (rtx x)
1953 while (GET_CODE (x) == SUBREG)
1954 x = XEXP (x, 0);
1955 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
1958 static bool
1959 mep_nongeneral_reg (rtx x)
1961 while (GET_CODE (x) == SUBREG)
1962 x = XEXP (x, 0);
1963 return (GET_CODE (x) == REG
1964 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
1967 static bool
1968 mep_general_copro_reg (rtx x)
1970 while (GET_CODE (x) == SUBREG)
1971 x = XEXP (x, 0);
1972 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
1975 static bool
1976 mep_nonregister (rtx x)
1978 while (GET_CODE (x) == SUBREG)
1979 x = XEXP (x, 0);
1980 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
1983 #define DEBUG_RELOAD 0
1985 /* Return the secondary reload class needed for moving value X to or
1986 from a register in coprocessor register class CLASS. */
1988 static enum reg_class
1989 mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
1991 if (mep_general_reg (x))
1992 /* We can do the move directly if mep_have_core_copro_moves_p,
1993 otherwise we need to go through memory. Either way, no secondary
1994 register is needed. */
1995 return NO_REGS;
1997 if (mep_general_copro_reg (x))
1999 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2000 if (mep_have_copro_copro_moves_p)
2001 return NO_REGS;
2003 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2004 if (mep_have_core_copro_moves_p)
2005 return GENERAL_REGS;
2007 /* Otherwise we need to do it through memory. No secondary
2008 register is needed. */
2009 return NO_REGS;
/* Memory (or other non-register) source/destination.  */
2012 if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
2013 && constraint_satisfied_p (x, CONSTRAINT_U))
2014 /* X is a memory value that we can access directly. */
2015 return NO_REGS;
2017 /* We have to move X into a GPR first and then copy it to
2018 the coprocessor register. The move from the GPR to the
2019 coprocessor might be done directly or through memory,
2020 depending on mep_have_core_copro_moves_p. */
2021 return GENERAL_REGS;
2024 /* Copying X to register in RCLASS. */
/* SECONDARY_INPUT_RELOAD_CLASS worker: return the class of the
   intermediate register needed, or NO_REGS when none is.  */
2026 enum reg_class
2027 mep_secondary_input_reload_class (enum reg_class rclass,
2028 enum machine_mode mode ATTRIBUTE_UNUSED,
2029 rtx x)
2031 int rv = NO_REGS;
2033 #if DEBUG_RELOAD
2034 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2035 debug_rtx (x);
2036 #endif
2038 if (reg_class_subset_p (rclass, CR_REGS))
2039 rv = mep_secondary_copro_reload_class (rclass, x);
/* Non-general destinations can only be loaded from general regs.  */
2040 else if (MEP_NONGENERAL_CLASS (rclass)
2041 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2042 rv = GENERAL_REGS;
2044 #if DEBUG_RELOAD
2045 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2046 #endif
2047 return (enum reg_class) rv;
2050 /* Copying register in RCLASS to X. */
/* SECONDARY_OUTPUT_RELOAD_CLASS worker; mirror image of the input
   case above.  */
2052 enum reg_class
2053 mep_secondary_output_reload_class (enum reg_class rclass,
2054 enum machine_mode mode ATTRIBUTE_UNUSED,
2055 rtx x)
2057 int rv = NO_REGS;
2059 #if DEBUG_RELOAD
2060 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2061 debug_rtx (x);
2062 #endif
2064 if (reg_class_subset_p (rclass, CR_REGS))
2065 rv = mep_secondary_copro_reload_class (rclass, x);
2066 else if (MEP_NONGENERAL_CLASS (rclass)
2067 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2068 rv = GENERAL_REGS;
2070 #if DEBUG_RELOAD
2071 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2072 #endif
2074 return (enum reg_class) rv;
2077 /* Implement SECONDARY_MEMORY_NEEDED. */
/* True when a value must travel through memory to move between
   RCLASS1 and RCLASS2: core<->copro without direct move insns, or
   copro<->copro without either direct copro moves or a core
   intermediate.  */
2079 bool
2080 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2081 enum machine_mode mode ATTRIBUTE_UNUSED)
2083 if (!mep_have_core_copro_moves_p)
2085 if (reg_classes_intersect_p (rclass1, CR_REGS)
2086 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2087 return true;
2088 if (reg_classes_intersect_p (rclass2, CR_REGS)
2089 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2090 return true;
2091 if (!mep_have_copro_copro_moves_p
2092 && reg_classes_intersect_p (rclass1, CR_REGS)
2093 && reg_classes_intersect_p (rclass2, CR_REGS))
2094 return true;
2096 return false;
/* Secondary-reload expander: move operands[1] to operands[0] using
   scratch operands[2] where a direct move is impossible.  */
2099 void
2100 mep_expand_reload (rtx *operands, enum machine_mode mode)
2102 /* There are three cases for each direction:
2103 register, farsym
2104 control, farsym
2105 control, nearsym */
2107 int s0 = mep_section_tag (operands[0]) == 'f';
2108 int s1 = mep_section_tag (operands[1]) == 'f';
2109 int c0 = mep_nongeneral_reg (operands[0]);
2110 int c1 = mep_nongeneral_reg (operands[1]);
/* WHICH encodes (dest-kind, src-kind) as two decimal digits:
   2 = far symbol, 1 = control reg, 0 = other.  NOTE(review): the
   labels 00/01/02 below are octal literals, but their values equal
   the intended decimal ones, so the switch is correct.  */
2111 int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);
2113 #if DEBUG_RELOAD
2114 fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
2115 debug_rtx (operands[0]);
2116 debug_rtx (operands[1]);
2117 #endif
2119 switch (which)
2121 case 00: /* Don't know why this gets here. */
2122 case 02: /* general = far */
2123 emit_move_insn (operands[0], operands[1]);
2124 return;
/* Any move involving a control register goes through the general
   scratch register.  */
2126 case 10: /* cr = mem */
2127 case 11: /* cr = cr */
2128 case 01: /* mem = cr */
2129 case 12: /* cr = far */
2130 emit_move_insn (operands[2], operands[1]);
2131 emit_move_insn (operands[0], operands[2]);
2132 return;
/* Storing to a far symbol: materialize its address first.  */
2134 case 20: /* far = general */
2135 emit_move_insn (operands[2], XEXP (operands[1], 0));
2136 emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
2137 return;
2139 case 21: /* far = cr */
2140 case 22: /* far = far */
2141 default:
2142 fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
2143 which, mode_name[mode]);
2144 debug_rtx (operands[0]);
2145 debug_rtx (operands[1]);
2146 gcc_unreachable ();
2150 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2151 can be moved directly into registers 0 to 7, but not into the rest.
2152 If so, and if the required class includes registers 0 to 7, restrict
2153 it to those registers. */
2155 enum reg_class
2156 mep_preferred_reload_class (rtx x, enum reg_class rclass)
2158 switch (GET_CODE (x))
2160 case CONST_INT:
/* Constants in (0x10000, 0x1000000) with nonzero low 16 bits need
   the movu form, which only targets $0..$7 (TPREL_REGS).  */
2161 if (INTVAL (x) >= 0x10000
2162 && INTVAL (x) < 0x01000000
2163 && (INTVAL (x) & 0xffff) != 0
2164 && reg_class_subset_p (TPREL_REGS, rclass))
2165 rclass = TPREL_REGS;
2166 break;
2168 case CONST:
2169 case SYMBOL_REF:
2170 case LABEL_REF:
/* Near symbolic addresses likewise load via movu into $0..$7.  */
2171 if (mep_section_tag (x) != 'f'
2172 && reg_class_subset_p (TPREL_REGS, rclass))
2173 rclass = TPREL_REGS;
2174 break;
2176 default:
2177 break;
2179 return rclass;
2182 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2183 moves, 4 for direct double-register moves, and 1000 for anything
2184 that requires a temporary register or temporary stack slot. */
2187 mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
/* copro<->copro with direct moves available: cheapest copro case.  */
2189 if (mep_have_copro_copro_moves_p
2190 && reg_class_subset_p (from, CR_REGS)
2191 && reg_class_subset_p (to, CR_REGS))
2193 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2194 return 4;
2195 return 2;
/* copro<->copro without direct moves: twice the cost.  */
2197 if (reg_class_subset_p (from, CR_REGS)
2198 && reg_class_subset_p (to, CR_REGS))
2200 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2201 return 8;
2202 return 4;
/* core<->copro.  */
2204 if (reg_class_subset_p (from, CR_REGS)
2205 || reg_class_subset_p (to, CR_REGS))
2207 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2208 return 4;
2209 return 2;
2211 if (mep_secondary_memory_needed (from, to, mode))
2212 return 1000;
2213 if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
2214 return 1000;
2216 if (GET_MODE_SIZE (mode) > 4)
2217 return 4;
2219 return 2;
2223 /* Functions to save and restore machine-specific function data. */
/* init_machine_status hook: allocate a zeroed, GC-managed
   machine_function for the current function.  */
2225 static struct machine_function *
2226 mep_init_machine_status (void)
2228 return ggc_cleared_alloc<machine_function> ();
/* TARGET_ALLOCATE_INITIAL_VALUE hook: give hard register REG's entry
   value a fixed stack slot (relative to the arg pointer) so the
   prologue can save it there.  Returns NULL_RTX when no slot should
   be allocated.  */
2231 static rtx
2232 mep_allocate_initial_value (rtx reg)
2234 int rss;
2236 if (GET_CODE (reg) != REG)
2237 return NULL_RTX;
2239 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2240 return NULL_RTX;
2242 /* In interrupt functions, the "initial" values of $gp and $tp are
2243 provided by the prologue. They are not necessarily the same as
2244 the values that the caller was using. */
2245 if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
2246 if (mep_interrupt_p ())
2247 return NULL_RTX;
/* Lazily assign the next 4-byte save slot to this register.  */
2249 if (! cfun->machine->reg_save_slot[REGNO(reg)])
2251 cfun->machine->reg_save_size += 4;
2252 cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
2255 rss = cfun->machine->reg_save_slot[REGNO(reg)];
2256 return gen_rtx_MEM (SImode, plus_constant (Pmode, arg_pointer_rtx, -rss));
2260 mep_return_addr_rtx (int count)
2262 if (count != 0)
2263 return const0_rtx;
2265 return get_hard_reg_initial_val (Pmode, LP_REGNO);
/* Return the pseudo holding the entry value of $tp (the based-section
   base register); allocated lazily as a hard-reg initial value.  */
2268 static rtx
2269 mep_tp_rtx (void)
2271 return get_hard_reg_initial_val (Pmode, TP_REGNO);
/* Return the pseudo holding the entry value of $gp (the tiny-section
   base register); allocated lazily as a hard-reg initial value.  */
2274 static rtx
2275 mep_gp_rtx (void)
2277 return get_hard_reg_initial_val (Pmode, GP_REGNO);
/* True if the current function has the "interrupt" attribute.  The
   answer is cached in cfun->machine->interrupt_handler using the
   encoding 0 = not yet computed, 1 = no, 2 = yes.  */
2280 static bool
2281 mep_interrupt_p (void)
2283 if (cfun->machine->interrupt_handler == 0)
2285 int interrupt_handler
2286 = (lookup_attribute ("interrupt",
2287 DECL_ATTRIBUTES (current_function_decl))
2288 != NULL_TREE);
2289 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2291 return cfun->machine->interrupt_handler == 2;
/* True if the current function has the "disinterrupt" attribute;
   cached like mep_interrupt_p (0 = unknown, 1 = no, 2 = yes).  */
2294 static bool
2295 mep_disinterrupt_p (void)
2297 if (cfun->machine->disable_interrupts == 0)
2299 int disable_interrupts
2300 = (lookup_attribute ("disinterrupt",
2301 DECL_ATTRIBUTES (current_function_decl))
2302 != NULL_TREE);
2303 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2305 return cfun->machine->disable_interrupts == 2;
2309 /* Frame/Epilog/Prolog Related. */
/* Return true if INSN (an insn or a bare pattern) sets REG.  */
2311 static bool
2312 mep_reg_set_p (rtx reg, rtx insn)
2314 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2315 if (INSN_P (insn))
2317 if (FIND_REG_INC_NOTE (insn, reg))
2318 return true;
2319 insn = PATTERN (insn);
/* A register copied onto itself (reg = reg) is not a real set.  */
2322 if (GET_CODE (insn) == SET
2323 && GET_CODE (XEXP (insn, 0)) == REG
2324 && GET_CODE (XEXP (insn, 1)) == REG
2325 && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
2326 return false;
2328 return set_of (reg, insn) != NULL_RTX;
/* Tri-state cache values for cfun->machine->reg_saved[].  */
2332 #define MEP_SAVES_UNKNOWN 0
2333 #define MEP_SAVES_YES 1
2334 #define MEP_SAVES_MAYBE 2
2335 #define MEP_SAVES_NO 3
/* Return true if hard register REGNO is written anywhere in the
   current function (scanning the whole insn chain), or must be
   treated as such ($lp under profiling; any live reg in an interrupt
   handler).  */
2337 static bool
2338 mep_reg_set_in_function (int regno)
2340 rtx reg, insn;
2342 if (mep_interrupt_p () && df_regs_ever_live_p(regno))
2343 return true;
/* Profiling code implicitly clobbers the link register.  */
2345 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2346 return true;
2348 push_topmost_sequence ();
2349 insn = get_insns ();
2350 pop_topmost_sequence ();
2352 if (!insn)
2353 return false;
2355 reg = gen_rtx_REG (SImode, regno);
2357 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2358 if (INSN_P (insn) && mep_reg_set_p (reg, insn))
2359 return true;
2360 return false;
/* Return true if the function contains a bare asm (ASM_INPUT) with no
   operands — such asms may touch any register, so interrupt handlers
   must save everything.  Cached: 0 = unknown, 1 = no, 2 = yes.  */
2363 static bool
2364 mep_asm_without_operands_p (void)
2366 if (cfun->machine->asms_without_operands == 0)
2368 rtx insn;
2370 push_topmost_sequence ();
2371 insn = get_insns ();
2372 pop_topmost_sequence ();
2374 cfun->machine->asms_without_operands = 1;
2375 while (insn)
2377 if (INSN_P (insn)
2378 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2380 cfun->machine->asms_without_operands = 2;
2381 break;
2383 insn = NEXT_INSN (insn);
2387 return cfun->machine->asms_without_operands == 2;
2390 /* Interrupt functions save/restore every call-preserved register, and
2391 any call-used register it uses (or all if it calls any function,
2392 since they may get clobbered there too). Here we check to see
2393 which call-used registers need saving. */
/* Extra IVC2 coprocessor control registers that must be preserved.  */
2395 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2396 && (r == FIRST_CCR_REGNO + 1 \
2397 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2398 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
/* Return true if register R must be saved by an interrupt handler's
   prologue.  Always false outside interrupt functions.  */
2400 static bool
2401 mep_interrupt_saved_reg (int r)
2403 if (!mep_interrupt_p ())
2404 return false;
/* The prologue/epilogue use this scratch reg to move control regs.  */
2405 if (r == REGSAVE_CONTROL_TEMP
2406 || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
2407 return true;
/* A bare asm may clobber anything not fixed.  */
2408 if (mep_asm_without_operands_p ()
2409 && (!fixed_regs[r]
2410 || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
2411 || IVC2_ISAVED_REG (r)))
2412 return true;
2413 if (!crtl->is_leaf)
2414 /* Function calls mean we need to save $lp. */
2415 if (r == LP_REGNO || IVC2_ISAVED_REG (r))
2416 return true;
2417 if (!crtl->is_leaf || cfun->machine->doloop_tags > 0)
2418 /* The interrupt handler might use these registers for repeat blocks,
2419 or it might call a function that does so. */
2420 if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
2421 return true;
/* In a leaf handler, an unused call-clobbered reg needs no save.  */
2422 if (crtl->is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
2423 return false;
2424 /* Functions we call might clobber these. */
2425 if (call_used_regs[r] && !fixed_regs[r])
2426 return true;
2427 /* Additional registers that need to be saved for IVC2. */
2428 if (IVC2_ISAVED_REG (r))
2429 return true;
2431 return false;
/* Return true if the current function must save register R in its
   prologue.  Once the frame layout is locked, the cached per-register
   answer in cfun->machine->reg_saved[] is authoritative.  */
2434 static bool
2435 mep_call_saves_register (int r)
2437 if (! cfun->machine->frame_locked)
2439 int rv = MEP_SAVES_NO;
/* A reg with an allocated initial-value save slot must be saved.  */
2441 if (cfun->machine->reg_save_slot[r])
2442 rv = MEP_SAVES_YES;
/* Profiling clobbers $lp; see mep_epilogue_uses.  */
2443 else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2444 rv = MEP_SAVES_YES;
2445 else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
2446 rv = MEP_SAVES_YES;
2447 else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
2448 rv = MEP_SAVES_YES;
2449 else if (crtl->calls_eh_return && (r == 10 || r == 11))
2450 /* We need these to have stack slots so that they can be set during
2451 unwinding. */
2452 rv = MEP_SAVES_YES;
2453 else if (mep_interrupt_saved_reg (r))
2454 rv = MEP_SAVES_YES;
2455 cfun->machine->reg_saved[r] = rv;
2457 return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
2460 /* Return true if epilogue uses register REGNO. */
2462 bool
2463 mep_epilogue_uses (int regno)
2465 /* Since $lp is a call-saved register, the generic code will normally
2466 mark it used in the epilogue if it needs to be saved and restored.
2467 However, when profiling is enabled, the profiling code will implicitly
2468 clobber $11. This case has to be handled specially both here and in
2469 mep_call_saves_register. */
2470 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2471 return true;
2472 /* Interrupt functions save/restore pretty much everything. */
2473 return (reload_completed && mep_interrupt_saved_reg (regno));
2476 static int
2477 mep_reg_size (int regno)
2479 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2480 return 8;
2481 return 4;
2484 /* Worker function for TARGET_CAN_ELIMINATE. */
2486 bool
2487 mep_can_eliminate (const int from, const int to)
2489 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2490 ? ! frame_pointer_needed
2491 : true);
/* Return the offset between register-elimination pairs FROM/TO.  As a
   side effect this computes and caches the regsave_filler and
   frame_filler padding needed to keep the frame 8-byte aligned.  */
2495 mep_elimination_offset (int from, int to)
2497 int reg_save_size;
2498 int i;
2499 int frame_size = get_frame_size () + crtl->outgoing_args_size;
2500 int total_size;
/* Until the layout is locked, start from a clean save-decision cache.  */
2502 if (!cfun->machine->frame_locked)
2503 memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));
2505 /* We don't count arg_regs_to_save in the arg pointer offset, because
2506 gcc thinks the arg pointer has moved along with the saved regs.
2507 However, we do count it when we adjust $sp in the prologue. */
2508 reg_save_size = 0;
2509 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2510 if (mep_call_saves_register (i))
2511 reg_save_size += mep_reg_size (i);
/* Pad the register-save area to a multiple of 8 bytes.  */
2513 if (reg_save_size % 8)
2514 cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
2515 else
2516 cfun->machine->regsave_filler = 0;
2518 /* This is what our total stack adjustment looks like. */
2519 total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);
2521 if (total_size % 8)
2522 cfun->machine->frame_filler = 8 - (total_size % 8);
2523 else
2524 cfun->machine->frame_filler = 0;
2527 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
2528 return reg_save_size + cfun->machine->regsave_filler;
2530 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2531 return cfun->machine->frame_filler + frame_size;
2533 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2534 return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;
2536 gcc_unreachable ();
/* Mark X as frame-related (for DWARF CFI generation) and return it.  */
2539 static rtx
2540 F (rtx x)
2542 RTX_FRAME_RELATED_P (x) = 1;
2543 return x;
2546 /* Since the prologue/epilogue code is generated after optimization,
2547 we can't rely on gcc to split constants for us. So, this code
2548 captures all the ways to add a constant to a register in one logic
2549 chunk, including optimizing away insns we just don't need. This
2550 makes the prolog/epilog code easier to follow. */
/* Emit DEST = SRC + VALUE.  If MARK_FRAME is nonzero, mark the emitted
   insn(s) as frame-related so the unwinder sees the adjustment.  */
2551 static void
2552 add_constant (int dest, int src, int value, int mark_frame)
2554 rtx insn;
2555 int hi, lo;
/* Nothing to do at all.  */
2557 if (src == dest && value == 0)
2558 return;
/* Plain register copy.  */
2560 if (value == 0)
2562 insn = emit_move_insn (gen_rtx_REG (SImode, dest),
2563 gen_rtx_REG (SImode, src));
2564 if (mark_frame)
2565 RTX_FRAME_RELATED_P(insn) = 1;
2566 return;
/* The constant fits in a single add-immediate.  */
2569 if (value >= -32768 && value <= 32767)
2571 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2572 gen_rtx_REG (SImode, src),
2573 GEN_INT (value)));
2574 if (mark_frame)
2575 RTX_FRAME_RELATED_P(insn) = 1;
2576 return;
2579 /* Big constant, need to use a temp register. We use
2580 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2581 area is always small enough to directly add to). */
2583 hi = trunc_int_for_mode (value & 0xffff0000, SImode);
2584 lo = value & 0xffff;
2586 insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2587 GEN_INT (hi));
2589 if (lo)
2591 insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2592 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2593 GEN_INT (lo)));
2596 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2597 gen_rtx_REG (SImode, src),
2598 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
/* The three-insn sequence is opaque to the CFI machinery; attach a
   REG_FRAME_RELATED_EXPR note describing the net effect.  */
2599 if (mark_frame)
2601 RTX_FRAME_RELATED_P(insn) = 1;
2602 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2603 gen_rtx_SET (SImode,
2604 gen_rtx_REG (SImode, dest),
2605 gen_rtx_PLUS (SImode,
2606 gen_rtx_REG (SImode, dest),
2607 GEN_INT (value))));
2611 /* Move SRC to DEST. Mark the move as being potentially dead if
2612 MAYBE_DEAD_P. */
/* NOTE: the REG_MAYBE_DEAD annotation is currently disabled (#if 0),
   so this is effectively just emit_move_insn; MAYBE_DEAD_P is kept so
   callers can still express intent.  */
2614 static rtx
2615 maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2617 rtx insn = emit_move_insn (dest, src);
2618 #if 0
2619 if (maybe_dead_p)
2620 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2621 #endif
2622 return insn;
2625 /* Used for interrupt functions, which can't assume that $tp and $gp
2626 contain the correct pointers. */
2628 static void
2629 mep_reload_pointer (int regno, const char *symbol)
2631 rtx reg, sym;
2633 if (!df_regs_ever_live_p(regno) && crtl->is_leaf)
2634 return;
2636 reg = gen_rtx_REG (SImode, regno);
2637 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2638 emit_insn (gen_movsi_topsym_s (reg, sym));
2639 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2642 /* Assign save slots for any register not already saved. DImode
2643 registers go at the end of the reg save area; the rest go at the
2644 beginning. This is for alignment purposes. Returns true if a frame
2645 is really needed. */
2646 static bool
2647 mep_assign_save_slots (int reg_save_size)
2649 bool really_need_stack_frame = false;
2650 int di_ofs = 0;
2651 int i;
2653 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2654 if (mep_call_saves_register(i))
2656 int regsize = mep_reg_size (i);
/* $tp/$gp/$lp only force a frame if they are actually modified.  */
2658 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2659 || mep_reg_set_in_function (i))
2660 really_need_stack_frame = true;
2662 if (cfun->machine->reg_save_slot[i])
2663 continue;
/* 4-byte registers pack from the bottom; 8-byte ones from the top
   so they stay 8-byte aligned.  Slot values are offsets from the
   top of the save area (see uses as "sp_offset - rss").  */
2665 if (regsize < 8)
2667 cfun->machine->reg_save_size += regsize;
2668 cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
2670 else
2672 cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
2673 di_ofs += 8;
/* From here on the layout must not change.  */
2676 cfun->machine->frame_locked = 1;
2677 return really_need_stack_frame;
/* Expand the function prologue: adjust $sp, store every register that
   mep_call_saves_register approves into its assigned slot, set up the
   frame pointer if needed, and reload $gp/$tp for interrupt handlers.
   All frame-related stores carry CFI notes for the unwinder.  */
2680 void
2681 mep_expand_prologue (void)
2683 int i, rss, sp_offset = 0;
2684 int reg_save_size;
2685 int frame_size;
2686 int really_need_stack_frame;
2688 /* We must not allow register renaming in interrupt functions,
2689 because that invalidates the correctness of the set of call-used
2690 registers we're going to save/restore. */
2691 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2693 if (mep_disinterrupt_p ())
2694 emit_insn (gen_mep_disable_int ());
2696 cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;
2698 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2699 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2700 really_need_stack_frame = frame_size;
2702 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
/* For small frames do the whole $sp adjustment in one step so that
   the save/restore offsets stay within short-offset range.  */
2704 sp_offset = reg_save_size;
2705 if (sp_offset + frame_size < 128)
2706 sp_offset += frame_size ;
2708 add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);
2710 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2711 if (mep_call_saves_register(i))
2713 rtx mem;
2714 bool maybe_dead_p;
2715 enum machine_mode rmode;
2717 rss = cfun->machine->reg_save_slot[i];
2719 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2720 && (!mep_reg_set_in_function (i)
2721 && !mep_interrupt_p ()))
2722 continue;
2724 if (mep_reg_size (i) == 8)
2725 rmode = DImode;
2726 else
2727 rmode = SImode;
2729 /* If there is a pseudo associated with this register's initial value,
2730 reload might have already spilt it to the stack slot suggested by
2731 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2732 deleted as dead. */
2733 mem = gen_rtx_MEM (rmode,
2734 plus_constant (Pmode, stack_pointer_rtx,
2735 sp_offset - rss));
2736 maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));
/* Directly storable registers go straight to memory; others are
   bounced through the control temp(s).  */
2738 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2739 F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
2740 else if (rmode == DImode)
/* 64-bit register: copy each 32-bit half via a temp, storing the
   halves in endian-dependent order.  */
2742 rtx insn;
2743 int be = TARGET_BIG_ENDIAN ? 4 : 0;
2745 mem = gen_rtx_MEM (SImode,
2746 plus_constant (Pmode, stack_pointer_rtx,
2747 sp_offset - rss + be));
2749 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2750 gen_rtx_REG (SImode, i),
2751 maybe_dead_p);
2752 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2753 gen_rtx_ZERO_EXTRACT (SImode,
2754 gen_rtx_REG (DImode, i),
2755 GEN_INT (32),
2756 GEN_INT (32)),
2757 maybe_dead_p);
2758 insn = maybe_dead_move (mem,
2759 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2760 maybe_dead_p);
2761 RTX_FRAME_RELATED_P (insn) = 1;
2763 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2764 gen_rtx_SET (VOIDmode,
2765 copy_rtx (mem),
2766 gen_rtx_REG (rmode, i)));
2767 mem = gen_rtx_MEM (SImode,
2768 plus_constant (Pmode, stack_pointer_rtx,
2769 sp_offset - rss + (4-be)));
2770 insn = maybe_dead_move (mem,
2771 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2772 maybe_dead_p);
2774 else
2776 rtx insn;
2777 maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2778 gen_rtx_REG (rmode, i),
2779 maybe_dead_p);
2780 insn = maybe_dead_move (mem,
2781 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2782 maybe_dead_p);
2783 RTX_FRAME_RELATED_P (insn) = 1;
2785 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2786 gen_rtx_SET (VOIDmode,
2787 copy_rtx (mem),
2788 gen_rtx_REG (rmode, i)));
2792 if (frame_pointer_needed)
2794 /* We've already adjusted down by sp_offset. Total $sp change
2795 is reg_save_size + frame_size. We want a net change here of
2796 just reg_save_size. */
2797 add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
2800 add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);
2802 if (mep_interrupt_p ())
2804 mep_reload_pointer(GP_REGNO, "__sdabase");
2805 mep_reload_pointer(TP_REGNO, "__tpbase");
/* Emit a human-readable frame-layout description as assembler comments
   at the start of each function (full map only when debug info is
   enabled).  Purely cosmetic; emits nothing executable.  */
2809 static void
2810 mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2812 int local = hwi_local;
2813 int frame_size = local + crtl->outgoing_args_size;
2814 int reg_save_size;
2815 int ffill;
2816 int i, sp, skip;
2817 int sp_offset;
2818 int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
2820 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2821 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2822 sp_offset = reg_save_size + frame_size;
2824 ffill = cfun->machine->frame_filler;
/* Pick the symbolic or numeric name for register 8 depending on
   whether it is serving as the frame pointer.  */
2826 if (cfun->machine->mep_frame_pointer_needed)
2827 reg_names[FP_REGNO] = "$fp";
2828 else
2829 reg_names[FP_REGNO] = "$8";
2831 if (sp_offset == 0)
2832 return;
/* Without debug info, print only a one-line summary.  */
2834 if (debug_info_level == DINFO_LEVEL_NONE)
2836 fprintf (file, "\t# frame: %d", sp_offset);
2837 if (reg_save_size)
2838 fprintf (file, " %d regs", reg_save_size);
2839 if (local)
2840 fprintf (file, " %d locals", local);
2841 if (crtl->outgoing_args_size)
2842 fprintf (file, " %d args", crtl->outgoing_args_size);
2843 fprintf (file, "\n");
2844 return;
2847 fprintf (file, "\t#\n");
2848 fprintf (file, "\t# Initial Frame Information:\n");
2849 if (sp_offset || !frame_pointer_needed)
2850 fprintf (file, "\t# Entry ---------- 0\n");
2852 /* Sort registers by save slots, so they're printed in the order
2853 they appear in memory, not the order they're saved in. */
2854 for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
2855 slot_map[si] = si;
/* Simple O(n^2) selection sort; FIRST_PSEUDO_REGISTER is small and
   this only runs when emitting comments.  */
2856 for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
2857 for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
2858 if (cfun->machine->reg_save_slot[slot_map[si]]
2859 > cfun->machine->reg_save_slot[slot_map[sj]])
2861 int t = slot_map[si];
2862 slot_map[si] = slot_map[sj];
2863 slot_map[sj] = t;
2866 sp = 0;
2867 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2869 int rsize;
2870 int r = slot_map[i];
2871 int rss = cfun->machine->reg_save_slot[r];
2873 if (!mep_call_saves_register (r))
2874 continue;
/* Mirror the skip conditions used by mep_expand_prologue so the map
   matches what was actually stored.  */
2876 if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
2877 && (!mep_reg_set_in_function (r)
2878 && !mep_interrupt_p ()))
2879 continue;
2881 rsize = mep_reg_size(r);
2882 skip = rss - (sp+rsize);
2883 if (skip)
2884 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2885 fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2886 rsize, reg_names[r], sp_offset - rss);
2887 sp = rss;
2890 skip = reg_save_size - sp;
2891 if (skip)
2892 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2894 if (frame_pointer_needed)
2895 fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
2896 if (local)
2897 fprintf (file, "\t# %3d bytes for local vars\n", local);
2898 if (ffill)
2899 fprintf (file, "\t# %3d bytes for alignment\n", ffill);
2900 if (crtl->outgoing_args_size)
2901 fprintf (file, "\t# %3d bytes for outgoing args\n",
2902 crtl->outgoing_args_size);
2903 fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
2904 fprintf (file, "\t#\n");
/* Nonzero while expanding an eh_return epilogue: tells
   mep_expand_epilogue to leave $lp alone so the EH return address can
   be installed instead.  */
2908 static int mep_prevent_lp_restore = 0;
/* Nonzero while expanding a sibcall epilogue: tells
   mep_expand_epilogue to skip emitting the final return insn.  */
2909 static int mep_sibcall_epilogue = 0;
/* Expand the function epilogue: restore saved registers, undo the
   stack adjustment, and emit the appropriate return (normal, reti for
   interrupt handlers, or eh_return_internal).  Behavior is modulated
   by mep_prevent_lp_restore and mep_sibcall_epilogue.  */
2911 void
2912 mep_expand_epilogue (void)
2914 int i, sp_offset = 0;
2915 int reg_save_size = 0;
2916 int frame_size;
2917 int lp_temp = LP_REGNO, lp_slot = -1;
2918 int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
2919 int interrupt_handler = mep_interrupt_p ();
2921 if (profile_arc_flag == 2)
2922 emit_insn (gen_mep_bb_trace_ret ());
2924 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2925 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2927 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
2929 if (frame_pointer_needed)
2931 /* If we have a frame pointer, we won't have a reliable stack
2932 pointer (alloca, you know), so rebase SP from FP */
2933 emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
2934 gen_rtx_REG (SImode, FP_REGNO));
2935 sp_offset = reg_save_size;
2937 else
2939 /* SP is right under our local variable space. Adjust it if
2940 needed. */
2941 sp_offset = reg_save_size + frame_size;
2942 if (sp_offset >= 128)
2944 add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
2945 sp_offset -= frame_size;
2949 /* This is backwards so that we restore the control and coprocessor
2950 registers before the temporary registers we use to restore
2951 them. */
2952 for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
2953 if (mep_call_saves_register (i))
2955 enum machine_mode rmode;
2956 int rss = cfun->machine->reg_save_slot[i];
2958 if (mep_reg_size (i) == 8)
2959 rmode = DImode;
2960 else
2961 rmode = SImode;
/* Skip registers the prologue did not actually store, and the ones
   the current mode (sibcall / eh_return / normal) must not touch.  */
2963 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2964 && !(mep_reg_set_in_function (i) || interrupt_handler))
2965 continue;
2966 if (mep_prevent_lp_restore && i == LP_REGNO)
2967 continue;
2968 if (!mep_prevent_lp_restore
2969 && !interrupt_handler
2970 && (i == 10 || i == 11))
2971 continue;
2973 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2974 emit_move_insn (gen_rtx_REG (rmode, i),
2975 gen_rtx_MEM (rmode,
2976 plus_constant (Pmode, stack_pointer_rtx,
2977 sp_offset - rss)));
2978 else
2980 if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
2981 /* Defer this one so we can jump indirect rather than
2982 copying the RA to $lp and "ret". EH epilogues
2983 automatically skip this anyway. */
2984 lp_slot = sp_offset-rss;
2985 else
2987 emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2988 gen_rtx_MEM (rmode,
2989 plus_constant (Pmode,
2990 stack_pointer_rtx,
2991 sp_offset-rss)));
2992 emit_move_insn (gen_rtx_REG (rmode, i),
2993 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
2997 if (lp_slot != -1)
2999 /* Restore this one last so we know it will be in the temp
3000 register when we return by jumping indirectly via the temp. */
3001 emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
3002 gen_rtx_MEM (SImode,
3003 plus_constant (Pmode, stack_pointer_rtx,
3004 lp_slot)));
3005 lp_temp = REGSAVE_CONTROL_TEMP;
3009 add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);
/* eh_return additionally bumps $sp by the unwinder-supplied amount.  */
3011 if (crtl->calls_eh_return && mep_prevent_lp_restore)
3012 emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
3013 gen_rtx_REG (SImode, SP_REGNO),
3014 cfun->machine->eh_stack_adjust));
3016 if (mep_sibcall_epilogue)
3017 return;
3019 if (mep_disinterrupt_p ())
3020 emit_insn (gen_mep_enable_int ());
3022 if (mep_prevent_lp_restore)
3024 emit_jump_insn (gen_eh_return_internal ());
3025 emit_barrier ();
3027 else if (interrupt_handler)
3028 emit_jump_insn (gen_mep_reti ());
3029 else
3030 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
3033 void
3034 mep_expand_eh_return (rtx *operands)
3036 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3038 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3039 emit_move_insn (ra, operands[0]);
3040 operands[0] = ra;
3043 emit_insn (gen_eh_epilogue (operands[0]));
/* Expand the eh_epilogue pattern: run the normal epilogue with the $lp
   restore suppressed (the EH return address takes its place).  $0
   holds the unwinder's stack adjustment.  */
3046 void
3047 mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
3049 cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
3050 mep_prevent_lp_restore = 1;
3051 mep_expand_epilogue ();
3052 mep_prevent_lp_restore = 0;
/* Expand the epilogue for a sibling call: identical to the normal
   epilogue except the final return insn is suppressed (the sibcall
   jump replaces it).  */
3055 void
3056 mep_expand_sibcall_epilogue (void)
3058 mep_sibcall_epilogue = 1;
3059 mep_expand_epilogue ();
3060 mep_sibcall_epilogue = 0;
3063 static bool
3064 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3066 if (decl == NULL)
3067 return false;
3069 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3070 return false;
3072 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3073 if (mep_interrupt_p () || mep_disinterrupt_p ())
3074 return false;
3076 return true;
/* Return the register holding the EH stack adjustment ($10).  */
3080 mep_return_stackadj_rtx (void)
3082 return gen_rtx_REG (SImode, 10);
/* Return the register holding the EH return handler address ($lp).  */
3086 mep_return_handler_rtx (void)
3088 return gen_rtx_REG (SImode, LP_REGNO);
/* Emit the profiling stub, always right at the beginning of the
   function: preserve $0 and $lp on the stack, call __mep_mcount, then
   restore both.  The emitted text is byte-identical to the previous
   per-line fprintf version; no format specifiers were used, so a
   single fputs of the concatenated string is equivalent.  */
void
mep_function_profiler (FILE *file)
{
  fputs ("\t# mep function profiler\n"
	 "\tadd\t$sp, -8\n"
	 "\tsw\t$0, ($sp)\n"
	 "\tldc\t$0, $lp\n"
	 "\tsw\t$0, 4($sp)\n"
	 "\tbsr\t__mep_mcount\n"
	 "\tlw\t$0, 4($sp)\n"
	 "\tstc\t$0, $lp\n"
	 "\tlw\t$0, ($sp)\n"
	 "\tadd\t$sp, 8\n\n", file);
}
3107 const char *
3108 mep_emit_bb_trace_ret (void)
3110 fprintf (asm_out_file, "\t# end of block profiling\n");
3111 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3112 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3113 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3114 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3115 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3116 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3117 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3118 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3119 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3120 return "";
3123 #undef SAVE
3124 #undef RESTORE
3126 /* Operand Printing. */
/* Print ADDRESS as an operand address.  A MEM is unwrapped to its
   address; anything else must be a bare REG (see gcc.dg/asm-4.c).  */
3128 void
3129 mep_print_operand_address (FILE *stream, rtx address)
3131 if (GET_CODE (address) == MEM)
3132 address = XEXP (address, 0);
3133 else
3134 /* cf: gcc.dg/asm-4.c. */
3135 gcc_assert (GET_CODE (address) == REG)
3137 mep_print_operand (stream, address, 0);
/* Table driving mep_print_operand.  CODE is the '%' modifier letter (0
   for none).  PATTERN is matched against the string built by
   encode_pattern from the operand's RTL shape.  FORMAT is the output
   template: digits index the sub-rtxes captured in patternr[], '\\'
   escapes the next character, and other characters are emitted
   literally.  */
3140 static struct
3142 char code;
3143 const char *pattern;
3144 const char *format;
3146 const conversions[] =
3148 { 0, "r", "0" },
3149 { 0, "m+ri", "3(2)" },
3150 { 0, "mr", "(1)" },
3151 { 0, "ms", "(1)" },
3152 { 0, "ml", "(1)" },
3153 { 0, "mLrs", "%lo(3)(2)" },
3154 { 0, "mLr+si", "%lo(4+5)(2)" },
3155 { 0, "m+ru2s", "%tpoff(5)(2)" },
3156 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3157 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3158 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3159 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3160 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3161 { 0, "mi", "(1)" },
3162 { 0, "m+si", "(2+3)" },
3163 { 0, "m+li", "(2+3)" },
3164 { 0, "i", "0" },
3165 { 0, "s", "0" },
3166 { 0, "+si", "1+2" },
3167 { 0, "+u2si", "%tpoff(3+4)" },
3168 { 0, "+u3si", "%sdaoff(3+4)" },
3169 { 0, "l", "0" },
3170 { 'b', "i", "0" },
3171 { 'B', "i", "0" },
3172 { 'U', "i", "0" },
3173 { 'h', "i", "0" },
3174 { 'h', "Hs", "%hi(1)" },
3175 { 'I', "i", "0" },
3176 { 'I', "u2s", "%tpoff(2)" },
3177 { 'I', "u3s", "%sdaoff(2)" },
3178 { 'I', "+u2si", "%tpoff(3+4)" },
3179 { 'I', "+u3si", "%sdaoff(3+4)" },
3180 { 'J', "i", "0" },
3181 { 'P', "mr", "(1\\+),\\0" },
3182 { 'x', "i", "0" },
/* Sentinel terminating the table.  */
3183 { 0, 0, 0 }
/* Return the position of the single distinguishing bit in the low byte
   of I -- either the one set bit, or the one clear bit of an
   all-but-one-set mask.  Aborts if the low byte is neither.
   NOTE(review): for bits 0-3 each inverted case is the complement of
   its set-bit partner (0xfe == ~0x01, etc.), but for bits 4-7 the
   inverted cases appear cross-paired (0x7f == ~0x80 yet returns 4,
   0xef == ~0x10 yet returns 7) -- confirm against the MeP bit-op
   predicates whether these inputs are actually reachable.  */
3186 static int
3187 unique_bit_in (HOST_WIDE_INT i)
3189 switch (i & 0xff)
3191 case 0x01: case 0xfe: return 0;
3192 case 0x02: case 0xfd: return 1;
3193 case 0x04: case 0xfb: return 2;
3194 case 0x08: case 0xf7: return 3;
3195 case 0x10: case 0x7f: return 4;
3196 case 0x20: case 0xbf: return 5;
3197 case 0x40: case 0xdf: return 6;
3198 case 0x80: case 0xef: return 7;
3199 default:
3200 gcc_unreachable ();
3204 static int
3205 bit_size_for_clip (HOST_WIDE_INT i)
3207 int rv;
3209 for (rv = 0; rv < 31; rv ++)
3210 if (((HOST_WIDE_INT) 1 << rv) > i)
3211 return rv + 1;
3212 gcc_unreachable ();
3215 /* Print an operand to a assembler instruction. */
/* X is the operand, CODE the '%' modifier letter (0 if none).  The
   special codes '<', 'L' and 'M' are handled directly; everything else
   is looked up in the conversions[] table against the shape string
   produced by encode_pattern.  */
3217 void
3218 mep_print_operand (FILE *file, rtx x, int code)
3220 int i, j;
3221 const char *real_name;
3223 if (code == '<')
3225 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3226 we're using, then skip over the "mep_" part of its name. */
3227 const struct cgen_insn *insn;
3229 if (mep_get_move_insn (mep_cmov, &insn))
3230 fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
3231 else
3232 mep_intrinsic_unavailable (mep_cmov);
3233 return;
/* 'L': print the bit-operation suffix for the logical opcode in X.  */
3235 if (code == 'L')
3237 switch (GET_CODE (x))
3239 case AND:
3240 fputs ("clr", file);
3241 return;
3242 case IOR:
3243 fputs ("set", file);
3244 return;
3245 case XOR:
3246 fputs ("not", file);
3247 return;
3248 default:
3249 output_operand_lossage ("invalid %%L code");
3252 if (code == 'M')
3254 /* Print the second operand of a CR <- CR move. If we're using
3255 a two-operand instruction (i.e., a real cmov), then just print
3256 the operand normally. If we're using a "reg, reg, immediate"
3257 instruction such as caddi3, print the operand followed by a
3258 zero field. If we're using a three-register instruction,
3259 print the operand twice. */
3260 const struct cgen_insn *insn;
3262 mep_print_operand (file, x, 0);
3263 if (mep_get_move_insn (mep_cmov, &insn)
3264 && insn_data[insn->icode].n_operands == 3)
3266 fputs (", ", file);
3267 if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
3268 mep_print_operand (file, x, 0);
3269 else
3270 mep_print_operand (file, const0_rtx, 0);
3272 return;
/* Generic path: match X's encoded shape against the table, then
   interpret the matching entry's format string.  */
3275 encode_pattern (x);
3276 for (i = 0; conversions[i].pattern; i++)
3277 if (conversions[i].code == code
3278 && strcmp(conversions[i].pattern, pattern) == 0)
3280 for (j = 0; conversions[i].format[j]; j++)
/* '\\' escapes the next format character.  */
3281 if (conversions[i].format[j] == '\\')
3283 fputc (conversions[i].format[j+1], file);
3284 j++;
/* A digit prints the corresponding captured sub-rtx.  */
3286 else if (ISDIGIT(conversions[i].format[j]))
3288 rtx r = patternr[conversions[i].format[j] - '0'];
3289 switch (GET_CODE (r))
3291 case REG:
3292 fprintf (file, "%s", reg_names [REGNO (r)]);
3293 break;
3294 case CONST_INT:
3295 switch (code)
3297 case 'b':
3298 fprintf (file, "%d", unique_bit_in (INTVAL (r)));
3299 break;
3300 case 'B':
3301 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
3302 break;
3303 case 'h':
3304 fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
3305 break;
3306 case 'U':
3307 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
3308 break;
3309 case 'J':
3310 fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
3311 break;
3312 case 'x':
3313 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3314 && !(INTVAL (r) & 0xff))
3315 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
3316 else
3317 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3318 break;
3319 case 'I':
3320 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3321 && conversions[i].format[j+1] == 0)
3323 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
3324 fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
3326 else
3327 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3328 break;
3329 default:
3330 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3331 break;
3333 break;
3334 case CONST_DOUBLE:
3335 fprintf(file, "[const_double 0x%lx]",
3336 (unsigned long) CONST_DOUBLE_HIGH(r));
3337 break;
3338 case SYMBOL_REF:
3339 real_name = targetm.strip_name_encoding (XSTR (r, 0));
3340 assemble_name (file, real_name);
3341 break;
3342 case LABEL_REF:
3343 output_asm_label (r);
3344 break;
3345 default:
3346 fprintf (stderr, "don't know how to print this operand:");
3347 debug_rtx (r);
3348 gcc_unreachable ();
3351 else
/* Literal format character; a '+' before a negative constant is
   suppressed (the constant prints its own '-').  */
3353 if (conversions[i].format[j] == '+'
3354 && (!code || code == 'I')
3355 && ISDIGIT (conversions[i].format[j+1])
3356 && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
3357 && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
3358 continue;
3359 fputc(conversions[i].format[j], file);
3361 break;
3363 if (!conversions[i].pattern)
3365 error ("unconvertible operand %c %qs", code?code:'-', pattern);
3366 debug_rtx(x);
3369 return;
/* Called by final before each insn is output; prefix VLIW-bundled
   jumps with '+' so the assembler keeps them in the bundle.  */
3372 void
3373 mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
3374 int noperands ATTRIBUTE_UNUSED)
3376 /* Despite the fact that MeP is perfectly capable of branching and
3377 doing something else in the same bundle, gcc does jump
3378 optimization *after* scheduling, so we cannot trust the bundling
3379 flags on jump instructions. */
3380 if (GET_MODE (insn) == BImode
3381 && get_attr_slots (insn) != SLOTS_CORE)
3382 fputc ('+', asm_out_file);
3385 /* Function args in registers. */
/* Implement TARGET_SETUP_INCOMING_VARARGS: record how many of the four
   argument registers ($1..$4) still need to be spilled for a varargs
   function, and report the pretend-args size to the caller.
   NOTE(review): when all four arg registers are used, nsave is -1 and
   *pretend_size is set to -4 -- confirm callers tolerate a negative
   pretend size.  */
3387 static void
3388 mep_setup_incoming_varargs (cumulative_args_t cum,
3389 enum machine_mode mode ATTRIBUTE_UNUSED,
3390 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3391 int second_time ATTRIBUTE_UNUSED)
3393 int nsave = 4 - (get_cumulative_args (cum)->nregs + 1);
3395 if (nsave > 0)
3396 cfun->machine->arg_regs_to_save = nsave;
3397 *pretend_size = nsave * 4;
3400 static int
3401 bytesize (const_tree type, enum machine_mode mode)
3403 if (mode == BLKmode)
3404 return int_size_in_bytes (type);
3405 return GET_MODE_SIZE (mode);
/* Implement EXPAND_BUILTIN_SAVEREGS: spill the unused incoming
   argument registers to a stack buffer and return its address.  For
   IVC2 the buffer also holds the 64-bit coprocessor argument
   registers, stored after the (8-byte aligned) GPR area.  */
3408 static rtx
3409 mep_expand_builtin_saveregs (void)
3411 int bufsize, i, ns;
3412 rtx regbuf;
3414 ns = cfun->machine->arg_regs_to_save;
3415 if (TARGET_IVC2)
/* Room for the GPRs rounded to 8 bytes, plus 8 bytes per coprocessor
   register; 64-bit alignment for the DImode stores below.  */
3417 bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
3418 regbuf = assign_stack_local (SImode, bufsize, 64);
3420 else
3422 bufsize = ns * 4;
3423 regbuf = assign_stack_local (SImode, bufsize, 32);
/* Spill the trailing NS argument GPRs ($5-ns .. $4).  */
3426 move_block_from_reg (5-ns, regbuf, ns);
3428 if (TARGET_IVC2)
3430 rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
3431 int ofs = 8 * ((ns+1)/2);
3433 for (i=0; i<ns; i++)
/* Registers 49+ are the coprocessor argument registers.  */
3435 int rn = (4-ns) + i + 49;
3436 rtx ptr;
3438 ptr = offset_address (tmp, GEN_INT (ofs), 2);
3439 emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
3440 ofs += 8;
3443 return XEXP (regbuf, 0);
/* True if tree type T is a vector type (used for IVC2 va_arg).  */
3446 #define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
/* Implement TARGET_BUILD_BUILTIN_VA_LIST.  The va_list is a record of
   four pointers: the next GPR slot, the limit of the GPR area, the
   next coprocessor-register slot (IVC2), and the next stack slot.  */
3448 static tree
3449 mep_build_builtin_va_list (void)
3451 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3452 tree record;
3455 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3457 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3458 get_identifier ("__va_next_gp"), ptr_type_node);
3459 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3460 get_identifier ("__va_next_gp_limit"),
3461 ptr_type_node);
3462 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3463 ptr_type_node);
3464 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3465 ptr_type_node);
3467 DECL_FIELD_CONTEXT (f_next_gp) = record;
3468 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3469 DECL_FIELD_CONTEXT (f_next_cop) = record;
3470 DECL_FIELD_CONTEXT (f_next_stack) = record;
/* Chain the fields in declaration order; consumers below walk this
   chain with DECL_CHAIN.  */
3472 TYPE_FIELDS (record) = f_next_gp;
3473 DECL_CHAIN (f_next_gp) = f_next_gp_limit;
3474 DECL_CHAIN (f_next_gp_limit) = f_next_cop;
3475 DECL_CHAIN (f_next_cop) = f_next_stack;
3477 layout_type (record);
3479 return record;
/* Implement va_start: initialize the four va_list fields.  NEXTARG is
   the address of the first stack-passed argument.  */
3482 static void
3483 mep_expand_va_start (tree valist, rtx nextarg)
3485 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3486 tree next_gp, next_gp_limit, next_cop, next_stack;
3487 tree t, u;
3488 int ns;
3490 ns = cfun->machine->arg_regs_to_save;
/* Field order matches mep_build_builtin_va_list.  */
3492 f_next_gp = TYPE_FIELDS (va_list_type_node);
3493 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3494 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3495 f_next_stack = DECL_CHAIN (f_next_cop);
3497 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3498 NULL_TREE);
3499 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3500 valist, f_next_gp_limit, NULL_TREE);
3501 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3502 NULL_TREE);
3503 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3504 valist, f_next_stack, NULL_TREE);
3506 /* va_list.next_gp = expand_builtin_saveregs (); */
3507 u = make_tree (sizetype, expand_builtin_saveregs ());
3508 u = fold_convert (ptr_type_node, u);
3509 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3510 TREE_SIDE_EFFECTS (t) = 1;
3511 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3513 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3514 u = fold_build_pointer_plus_hwi (u, 4 * ns);
3515 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3516 TREE_SIDE_EFFECTS (t) = 1;
3517 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3519 u = fold_build_pointer_plus_hwi (u, 8 * ((ns+1)/2));
3520 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3521 t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3522 TREE_SIDE_EFFECTS (t) = 1;
3523 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3525 /* va_list.next_stack = nextarg; */
3526 u = make_tree (ptr_type_node, nextarg);
3527 t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3528 TREE_SIDE_EFFECTS (t) = 1;
3529 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Values larger than a
   register (8 bytes for IVC2 vectors, else 4) are passed by reference.
   GPR-area arguments are fetched via next_gp (or next_cop for IVC2
   vectors) until the GPR area is exhausted, then from next_stack.  */
3532 static tree
3533 mep_gimplify_va_arg_expr (tree valist, tree type,
3534 gimple_seq *pre_p,
3535 gimple_seq *post_p ATTRIBUTE_UNUSED)
3537 HOST_WIDE_INT size, rsize;
3538 bool by_reference, ivc2_vec;
3539 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3540 tree next_gp, next_gp_limit, next_cop, next_stack;
3541 tree label_sover, label_selse;
3542 tree tmp, res_addr;
3544 ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);
3546 size = int_size_in_bytes (type);
3547 by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);
3549 if (by_reference)
3551 type = build_pointer_type (type);
3552 size = 4;
/* Stack slots are rounded up to full words.  */
3554 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
3556 f_next_gp = TYPE_FIELDS (va_list_type_node);
3557 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3558 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3559 f_next_stack = DECL_CHAIN (f_next_cop);
3561 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3562 NULL_TREE);
3563 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3564 valist, f_next_gp_limit, NULL_TREE);
3565 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3566 NULL_TREE);
3567 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3568 valist, f_next_stack, NULL_TREE);
3570 /* if f_next_gp < f_next_gp_limit
3571 IF (VECTOR_P && IVC2)
3572 val = *f_next_cop;
3573 ELSE
3574 val = *f_next_gp;
3575 f_next_gp += 4;
3576 f_next_cop += 8;
3577 else
3578 label_selse:
3579 val = *f_next_stack;
3580 f_next_stack += rsize;
3581 label_sover:
3584 label_sover = create_artificial_label (UNKNOWN_LOCATION);
3585 label_selse = create_artificial_label (UNKNOWN_LOCATION);
3586 res_addr = create_tmp_var (ptr_type_node, NULL);
/* Branch to the stack path when the GPR area is exhausted.  */
3588 tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
3589 unshare_expr (next_gp_limit));
3590 tmp = build3 (COND_EXPR, void_type_node, tmp,
3591 build1 (GOTO_EXPR, void_type_node,
3592 unshare_expr (label_selse)),
3593 NULL_TREE);
3594 gimplify_and_add (tmp, pre_p);
3596 if (ivc2_vec)
3598 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
3599 gimplify_and_add (tmp, pre_p);
3601 else
3603 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
3604 gimplify_and_add (tmp, pre_p);
/* Both cursors always advance together so they stay in sync.  */
3607 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_gp), 4);
3608 gimplify_assign (unshare_expr (next_gp), tmp, pre_p);
3610 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_cop), 8);
3611 gimplify_assign (unshare_expr (next_cop), tmp, pre_p);
3613 tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
3614 gimplify_and_add (tmp, pre_p);
3616 /* - - */
3618 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
3619 gimplify_and_add (tmp, pre_p);
3621 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
3622 gimplify_and_add (tmp, pre_p);
3624 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_stack), rsize);
3625 gimplify_assign (unshare_expr (next_stack), tmp, pre_p);
3627 /* - - */
3629 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
3630 gimplify_and_add (tmp, pre_p);
3632 res_addr = fold_convert (build_pointer_type (type), res_addr);
/* Dereference once for the value; twice when passed by reference.  */
3634 if (by_reference)
3635 res_addr = build_va_arg_indirect_ref (res_addr);
3637 return build_va_arg_indirect_ref (res_addr);
3640 void
3641 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3642 rtx libname ATTRIBUTE_UNUSED,
3643 tree fndecl ATTRIBUTE_UNUSED)
3645 pcum->nregs = 0;
3647 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3648 pcum->vliw = 1;
3649 else
3650 pcum->vliw = 0;
3653 /* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3654 larger than 4 bytes are passed indirectly. Return value in 0,
3655 unless bigger than 4 bytes, then the caller passes a pointer as the
3656 first arg. For varargs, we copy $1..$4 to the stack. */
/* Implement TARGET_FUNCTION_ARG per the ABI comment above.  */
3658 static rtx
3659 mep_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
3660 const_tree type ATTRIBUTE_UNUSED,
3661 bool named ATTRIBUTE_UNUSED)
3663 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3665 /* VOIDmode is a signal for the backend to pass data to the call
3666 expander via the second operand to the call pattern. We use
3667 this to determine whether to use "jsr" or "jsrv". */
3668 if (mode == VOIDmode)
3669 return GEN_INT (cum->vliw);
3671 /* If we haven't run out of argument registers, return the next;
      IVC2 vector arguments go in coprocessor registers ($c49+),
      everything else in core registers starting at $1. */
3672 if (cum->nregs < 4)
3674 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3675 return gen_rtx_REG (mode, cum->nregs + 49);
3676 else
3677 return gen_rtx_REG (mode, cum->nregs + 1);
3680 /* Otherwise the argument goes on the stack. */
3681 return NULL_RTX;
/* Implement TARGET_PASS_BY_REFERENCE.  Anything wider than a word is
   passed by reference, except IVC2 vectors of 5..8 bytes that still
   fit in a coprocessor argument register.  */
3684 static bool
3685 mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
3686 enum machine_mode mode,
3687 const_tree type,
3688 bool named ATTRIBUTE_UNUSED)
3690 int size = bytesize (type, mode);
3692 /* This is non-obvious, but yes, large values passed after we've run
3693 out of registers are *still* passed by reference - we put the
3694 address of the parameter on the stack, as well as putting the
3695 parameter itself elsewhere on the stack. */
3697 if (size <= 0 || size > 8)
3698 return true;
3699 if (size <= 4)
3700 return false;
/* 5..8 bytes: only an IVC2 vector still headed for a register
   travels by value. */
3701 if (TARGET_IVC2 && get_cumulative_args (cum)->nregs < 4
3702 && type != NULL_TREE && VECTOR_TYPE_P (type))
3703 return false;
3704 return true;
/* Implement TARGET_FUNCTION_ARG_ADVANCE.  Every argument consumes
   exactly one register slot (larger values go by reference).  */
3707 static void
3708 mep_function_arg_advance (cumulative_args_t pcum,
3709 enum machine_mode mode ATTRIBUTE_UNUSED,
3710 const_tree type ATTRIBUTE_UNUSED,
3711 bool named ATTRIBUTE_UNUSED)
3713 get_cumulative_args (pcum)->nregs += 1;
3716 bool
3717 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3719 int size = bytesize (type, BLKmode);
3720 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3721 return size > 0 && size <= 8 ? 0 : 1;
3722 return size > 0 && size <= 4 ? 0 : 1;
/* Implement TARGET_NARROW_VOLATILE_BITFIELD.  Always narrow volatile
   bit-field accesses to the width of the field's declared type.
   The original body had an unreachable "return false;" after the
   "return true;"; the dead statement is removed.  */

static bool
mep_narrow_volatile_bitfield (void)
{
  return true;
}
3732 /* Implement FUNCTION_VALUE. All values are returned in $0. */
/* Exception: IVC2 vector results come back in coprocessor reg $c48.  */
3735 mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
3737 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3738 return gen_rtx_REG (TYPE_MODE (type), 48);
3739 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3742 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
/* Libcalls never return vectors, so only the core return reg applies.  */
3745 mep_libcall_value (enum machine_mode mode)
3747 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3750 /* Handle pipeline hazards. */
/* Opcode classes we track for hazard detection, and their printable
   names (indexed by op_num) for the diagnostic comment.  */
3752 typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
3753 static const char *opnames[] = { "", "stc", "fsft", "ret" };
/* Class of the previously emitted opcode; persists across calls.  */
3755 static int prev_opcode = 0;
3757 /* This isn't as optimal as it could be, because we don't know what
3758 control register the STC opcode is storing in. We only need to add
3759 the nop if it's the relevant register, but we add it for irrelevant
3760 registers also. */
/* Called for each opcode as it is output; inserts a "nop" between an
   stc and a following fsft or ret to avoid the pipeline hazard.  */
3762 void
3763 mep_asm_output_opcode (FILE *file, const char *ptr)
3765 int this_opcode = op_none;
3766 const char *hazard = 0;
/* Classify the opcode by matching its mnemonic; the !ISGRAPH check
   ensures we match the whole mnemonic, not a prefix.  */
3768 switch (*ptr)
3770 case 'f':
3771 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3772 this_opcode = op_fsft;
3773 break;
3774 case 'r':
3775 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3776 this_opcode = op_ret;
3777 break;
3778 case 's':
3779 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3780 this_opcode = op_stc;
3781 break;
/* stc followed by fsft or ret needs a nop in between.  */
3784 if (prev_opcode == op_stc && this_opcode == op_fsft)
3785 hazard = "nop";
3786 if (prev_opcode == op_stc && this_opcode == op_ret)
3787 hazard = "nop";
3789 if (hazard)
3790 fprintf(file, "%s\t# %s-%s hazard\n\t",
3791 hazard, opnames[prev_opcode], opnames[this_opcode]);
3793 prev_opcode = this_opcode;
3796 /* Handle attributes. */
/* Attribute handler for "based" and "tiny".  Rejects non-variable
   targets and auto-storage variables, and ignores the attribute when
   it was written on the pointed-to type of a pointer variable.  */
3798 static tree
3799 mep_validate_based_tiny (tree *node, tree name, tree args,
3800 int flags ATTRIBUTE_UNUSED, bool *no_add)
3802 if (TREE_CODE (*node) != VAR_DECL
3803 && TREE_CODE (*node) != POINTER_TYPE
3804 && TREE_CODE (*node) != TYPE_DECL)
3806 warning (0, "%qE attribute only applies to variables", name);
3807 *no_add = true;
3809 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
/* Address-region placement requires static storage.  */
3811 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3813 warning (0, "address region attributes not allowed with auto storage class");
3814 *no_add = true;
3816 /* Ignore storage attribute of pointed to variable: char __far * x; */
3817 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3819 warning (0, "address region attributes on pointed-to types ignored");
3820 *no_add = true;
3824 return NULL_TREE;
3827 static int
3828 mep_multiple_address_regions (tree list, bool check_section_attr)
3830 tree a;
3831 int count_sections = 0;
3832 int section_attr_count = 0;
3834 for (a = list; a; a = TREE_CHAIN (a))
3836 if (is_attribute_p ("based", TREE_PURPOSE (a))
3837 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3838 || is_attribute_p ("near", TREE_PURPOSE (a))
3839 || is_attribute_p ("far", TREE_PURPOSE (a))
3840 || is_attribute_p ("io", TREE_PURPOSE (a)))
3841 count_sections ++;
3842 if (check_section_attr)
3843 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3846 if (check_section_attr)
3847 return section_attr_count;
3848 else
3849 return count_sections;
/* Fetch the relevant attribute list for DECL: type attributes for
   types; otherwise the decl's own attributes, falling back to the
   attributes of the decl's type.  The whole expansion is now
   parenthesized so the conditional chain cannot bind to operators at
   the use site (basic macro hygiene; the unparenthesized form would
   misparse in expressions like `x = MEP_ATTRIBUTES (d) ? a : b`).  */
#define MEP_ATTRIBUTES(decl) \
  ((TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
   : DECL_ATTRIBUTES (decl) \
     ? (DECL_ATTRIBUTES (decl)) \
     : TYPE_ATTRIBUTES (TREE_TYPE (decl)))
/* Attribute handler for "near" and "far".  Like the based/tiny
   handler but also allowed on functions, and additionally rejects a
   second address-region attribute on the same declaration.  */
3858 static tree
3859 mep_validate_near_far (tree *node, tree name, tree args,
3860 int flags ATTRIBUTE_UNUSED, bool *no_add)
3862 if (TREE_CODE (*node) != VAR_DECL
3863 && TREE_CODE (*node) != FUNCTION_DECL
3864 && TREE_CODE (*node) != METHOD_TYPE
3865 && TREE_CODE (*node) != POINTER_TYPE
3866 && TREE_CODE (*node) != TYPE_DECL)
3868 warning (0, "%qE attribute only applies to variables and functions",
3869 name);
3870 *no_add = true;
3872 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3874 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3876 warning (0, "address region attributes not allowed with auto storage class");
3877 *no_add = true;
3879 /* Ignore storage attribute of pointed to variable: char __far * x; */
3880 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3882 warning (0, "address region attributes on pointed-to types ignored")
3883 *no_add = true;
/* Conflicting region attribute already present: warn and wipe the
   old ones so only the new attribute remains.  */
3886 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
3888 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3889 name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
3890 DECL_ATTRIBUTES (*node) = NULL_TREE;
3892 return NULL_TREE;
3895 static tree
3896 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3897 int flags ATTRIBUTE_UNUSED, bool *no_add)
3899 if (TREE_CODE (*node) != FUNCTION_DECL
3900 && TREE_CODE (*node) != METHOD_TYPE)
3902 warning (0, "%qE attribute only applies to functions", name);
3903 *no_add = true;
3905 return NULL_TREE;
/* Attribute handler for "interrupt".  Only valid on functions;
   enforces the interrupt-handler contract: not inline, returns void,
   takes no arguments.  */
3908 static tree
3909 mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3910 int flags ATTRIBUTE_UNUSED, bool *no_add)
3912 tree function_type;
3914 if (TREE_CODE (*node) != FUNCTION_DECL)
3916 warning (0, "%qE attribute only applies to functions", name);
3917 *no_add = true;
3918 return NULL_TREE;
/* An interrupt handler can never be inlined; force the flag even if
   we also diagnose an explicit `inline`.  */
3921 if (DECL_DECLARED_INLINE_P (*node))
3922 error ("cannot inline interrupt function %qE", DECL_NAME (*node));
3923 DECL_UNINLINABLE (*node) = 1;
3925 function_type = TREE_TYPE (*node);
3927 if (TREE_TYPE (function_type) != void_type_node)
3928 error ("interrupt function must have return type of void");
/* A valid prototype is exactly (void): the single arg type is void
   and nothing follows it.  */
3930 if (prototype_p (function_type)
3931 && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
3932 || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
3933 error ("interrupt function must have no arguments");
3935 return NULL_TREE;
/* Attribute handler for "io" and "cb".  Valid only on variables; the
   optional argument must be an integer constant (the fixed address).
   Unless -mio-volatile is disabled, tagged variables become
   volatile.  */
3938 static tree
3939 mep_validate_io_cb (tree *node, tree name, tree args,
3940 int flags ATTRIBUTE_UNUSED, bool *no_add)
3942 if (TREE_CODE (*node) != VAR_DECL)
3944 warning (0, "%qE attribute only applies to variables", name);
3945 *no_add = true;
3948 if (args != NULL_TREE)
/* Strip a NON_LVALUE_EXPR wrapper so a parenthesized constant still
   validates.  */
3950 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
3951 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
3952 if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
3954 warning (0, "%qE attribute allows only an integer constant argument",
3955 name);
3956 *no_add = true;
3960 if (*no_add == false && !TARGET_IO_NO_VOLATILE)
3961 TREE_THIS_VOLATILE (*node) = 1;
3963 return NULL_TREE;
/* Attribute handler for "vliw".  Valid on function declarations and
   function/method types; other targets get a tailored warning, with a
   one-time hint for the common pointer/array misuses.  */
3966 static tree
3967 mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3968 int flags ATTRIBUTE_UNUSED, bool *no_add)
3970 if (TREE_CODE (*node) != FUNCTION_TYPE
3971 && TREE_CODE (*node) != FUNCTION_DECL
3972 && TREE_CODE (*node) != METHOD_TYPE
3973 && TREE_CODE (*node) != FIELD_DECL
3974 && TREE_CODE (*node) != TYPE_DECL)
/* These statics make the pointer/array usage hints fire at most once
   per compilation.  */
3976 static int gave_pointer_note = 0;
3977 static int gave_array_note = 0;
3978 static const char * given_type = NULL;
3980 given_type = get_tree_code_name (TREE_CODE (*node));
3981 if (TREE_CODE (*node) == POINTER_TYPE)
3982 given_type = "pointers";
3983 if (TREE_CODE (*node) == ARRAY_TYPE)
3984 given_type = "arrays";
3986 if (given_type)
3987 warning (0, "%qE attribute only applies to functions, not %s",
3988 name, given_type);
3989 else
3990 warning (0, "%qE attribute only applies to functions",
3991 name);
3992 *no_add = true;
3994 if (TREE_CODE (*node) == POINTER_TYPE
3995 && !gave_pointer_note)
3997 inform (input_location,
3998 "to describe a pointer to a VLIW function, use syntax like this:\n%s",
3999 " typedef int (__vliw *vfuncptr) ();");
4000 gave_pointer_note = 1;
4003 if (TREE_CODE (*node) == ARRAY_TYPE
4004 && !gave_array_note)
4006 inform (input_location,
4007 "to describe an array of VLIW function pointers, use syntax like this:\n%s",
4008 " typedef int (__vliw *vfuncptr[]) ();");
4009 gave_array_note = 1;
/* The attribute is meaningless unless the configuration has a VLIW
   coprocessor.  */
4012 if (!TARGET_VLIW)
4013 error ("VLIW functions are not allowed without a VLIW configuration");
4014 return NULL_TREE;
/* Implement TARGET_ATTRIBUTE_TABLE.  "io" and "cb" take an optional
   integer address argument; "vliw" applies to types; the rest take no
   arguments.  Terminated by the NULL entry.  */
4017 static const struct attribute_spec mep_attribute_table[11] =
4019 /* name min max decl type func handler
4020 affects_type_identity */
4021 { "based", 0, 0, false, false, false, mep_validate_based_tiny, false },
4022 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny, false },
4023 { "near", 0, 0, false, false, false, mep_validate_near_far, false },
4024 { "far", 0, 0, false, false, false, mep_validate_near_far, false },
4025 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt,
4026 false },
4027 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt, false },
4028 { "io", 0, 1, false, false, false, mep_validate_io_cb, false },
4029 { "cb", 0, 1, false, false, false, mep_validate_io_cb, false },
4030 { "vliw", 0, 0, false, true, false, mep_validate_vliw, false },
4031 { NULL, 0, 0, false, false, false, NULL, false }
4034 static bool
4035 mep_function_attribute_inlinable_p (const_tree callee)
4037 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4038 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4039 return (lookup_attribute ("disinterrupt", attrs) == 0
4040 && lookup_attribute ("interrupt", attrs) == 0);
4043 static bool
4044 mep_can_inline_p (tree caller, tree callee)
4046 if (TREE_CODE (callee) == ADDR_EXPR)
4047 callee = TREE_OPERAND (callee, 0);
4049 if (!mep_vliw_function_p (caller)
4050 && mep_vliw_function_p (callee))
4052 return false;
4054 return true;
/* Flag bits recorded per function name from #pragma call and
   #pragma disinterrupt.  */
4057 #define FUNC_CALL 1
4058 #define FUNC_DISINTERRUPT 2
/* One entry per function name mentioned in a pragma: which pragmas
   tagged it (flag) and which of those have been consumed (used).  */
4061 struct GTY(()) pragma_entry {
4062 int used;
4063 int flag;
4064 const char *funcname;
4066 typedef struct pragma_entry pragma_entry;
4068 /* Hash table mapping function names to their pragma flags.  */
4069 static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4071 static int
4072 pragma_entry_eq (const void *p1, const void *p2)
4074 const pragma_entry *old = (const pragma_entry *) p1;
4075 const char *new_name = (const char *) p2;
4077 return strcmp (old->funcname, new_name) == 0;
/* Hash callback for pragma_htab: hash the stored function name.  */
4080 static hashval_t
4081 pragma_entry_hash (const void *p)
4083 const pragma_entry *old = (const pragma_entry *) p;
4084 return htab_hash_string (old->funcname);
/* Record that FUNCNAME was tagged with pragma FLAG, creating the hash
   table and the entry on first use.  Flags accumulate with OR.  */
4087 static void
4088 mep_note_pragma_flag (const char *funcname, int flag)
4090 pragma_entry **slot;
4092 if (!pragma_htab)
4093 pragma_htab = htab_create_ggc (31, pragma_entry_hash,
4094 pragma_entry_eq, NULL);
4096 slot = (pragma_entry **)
4097 htab_find_slot_with_hash (pragma_htab, funcname,
4098 htab_hash_string (funcname), INSERT);
4100 if (!*slot)
/* GC-allocate the entry and copy the name, which may be transient.  */
4102 *slot = ggc_alloc<pragma_entry> ();
4103 (*slot)->flag = 0;
4104 (*slot)->used = 0;
4105 (*slot)->funcname = ggc_strdup (funcname);
4107 (*slot)->flag |= flag;
/* Return true if FUNCNAME was tagged with pragma FLAG, marking the
   flag as used (for the unused-pragma warning).  A section-encoding
   prefix of the form "@X." is stripped first.  */
4110 static bool
4111 mep_lookup_pragma_flag (const char *funcname, int flag)
4113 pragma_entry **slot;
4115 if (!pragma_htab)
4116 return false;
4118 if (funcname[0] == '@' && funcname[2] == '.')
4119 funcname += 3;
4121 slot = (pragma_entry **)
4122 htab_find_slot_with_hash (pragma_htab, funcname,
4123 htab_hash_string (funcname), NO_INSERT);
4124 if (slot && *slot && ((*slot)->flag & flag))
4126 (*slot)->used |= flag;
4127 return true;
4129 return false;
/* Public wrappers over the pragma flag table, one lookup/note pair
   per pragma kind (#pragma call, #pragma disinterrupt).  */
4132 bool
4133 mep_lookup_pragma_call (const char *funcname)
4135 return mep_lookup_pragma_flag (funcname, FUNC_CALL);
4138 void
4139 mep_note_pragma_call (const char *funcname)
4141 mep_note_pragma_flag (funcname, FUNC_CALL);
4144 bool
4145 mep_lookup_pragma_disinterrupt (const char *funcname)
4147 return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
4150 void
4151 mep_note_pragma_disinterrupt (const char *funcname)
4153 mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
/* htab_traverse callback: warn about any "#pragma disinterrupt" that
   never matched a function definition.  Always returns 1 so the
   traversal continues.  */
4156 static int
4157 note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
4159 const pragma_entry *d = (const pragma_entry *)(*slot);
4161 if ((d->flag & FUNC_DISINTERRUPT)
4162 && !(d->used & FUNC_DISINTERRUPT))
4163 warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
4164 return 1;
/* End-of-file hook: emit the unused-#pragma-disinterrupt warnings.  */
4167 void
4168 mep_file_cleanups (void)
4170 if (pragma_htab)
4171 htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
4174 /* These three functions provide a bridge between the pragmas that
4175 affect register classes, and the functions that maintain them. We
4176 can't call those functions directly as pragma handling is part of
4177 the front end and doesn't have direct access to them. */
4179 void
4180 mep_save_register_info (void)
4182 save_register_info ();
4185 void
4186 mep_reinit_regs (void)
4188 reinit_regs ();
4191 void
4192 mep_init_regs (void)
4194 init_regs ();
/* Map the attribute LIST of DECL to a single section-encoding
   character: 'b' based, 't' tiny, 'n' near, 'f' far, 'i'/'I' io
   (with/without an in-range fixed address), 'c' cb, or 0 for none.
   Warns about and truncates duplicate address-region attributes.  */
4199 static int
4200 mep_attrlist_to_encoding (tree list, tree decl)
4202 if (mep_multiple_address_regions (list, false) > 1)
4204 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4205 TREE_PURPOSE (TREE_CHAIN (list)),
4206 DECL_NAME (decl),
4207 DECL_SOURCE_LINE (decl));
/* Keep only the first region attribute.  */
4208 TREE_CHAIN (list) = NULL_TREE;
4211 while (list)
4213 if (is_attribute_p ("based", TREE_PURPOSE (list)))
4214 return 'b';
4215 if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
4216 return 't';
4217 if (is_attribute_p ("near", TREE_PURPOSE (list)))
4218 return 'n';
4219 if (is_attribute_p ("far", TREE_PURPOSE (list)))
4220 return 'f';
4221 if (is_attribute_p ("io", TREE_PURPOSE (list)))
/* 'i' when a constant address in [0, 0x1000000] was given,
   'I' otherwise.  */
4223 if (TREE_VALUE (list)
4224 && TREE_VALUE (TREE_VALUE (list))
4225 && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST
4227 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
4228 if (location >= 0
4229 && location <= 0x1000000)
4230 return 'i';
4232 return 'I';
4234 if (is_attribute_p ("cb", TREE_PURPOSE (list)))
4235 return 'c';
4236 list = TREE_CHAIN (list);
/* With -mtf, un-sectioned functions default to the far text section.  */
4238 if (TARGET_TF
4239 && TREE_CODE (decl) == FUNCTION_DECL
4240 && DECL_SECTION_NAME (decl) == 0)
4241 return 'f';
4242 return 0;
4245 static int
4246 mep_comp_type_attributes (const_tree t1, const_tree t2)
4248 int vliw1, vliw2;
4250 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4251 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4253 if (vliw1 != vliw2)
4254 return 0;
4256 return 1;
/* Implement TARGET_INSERT_ATTRIBUTES.  Attach "disinterrupt" to
   functions named in a pragma, then pick a default address-region
   attribute (based/tiny/far) for static-storage variables that have
   none, driven by size cutoffs, -mrand-tpgp, and -mconst-section.  */
4259 static void
4260 mep_insert_attributes (tree decl, tree *attributes)
4262 int size;
4263 const char *secname = 0;
4264 tree attrib, attrlist;
4265 char encoding;
4267 if (TREE_CODE (decl) == FUNCTION_DECL)
4269 const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));
4271 if (mep_lookup_pragma_disinterrupt (funcname))
4273 attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
4274 *attributes = chainon (*attributes, attrib);
/* Only static-storage variables get default region placement.  */
4278 if (TREE_CODE (decl) != VAR_DECL
4279 || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
4280 return;
4282 if (TREE_READONLY (decl) && TARGET_DC)
4283 /* -mdc means that const variables default to the near section,
4284 regardless of the size cutoff. */
4285 return;
4287 /* User specified an attribute, so override the default.
4288 Ignore storage attribute of pointed to variable. char __far * x; */
4289 if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
4291 if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
4292 TYPE_ATTRIBUTES (decl) = NULL_TREE;
4293 else if (DECL_ATTRIBUTES (decl) && *attributes)
4294 DECL_ATTRIBUTES (decl) = NULL_TREE;
/* Find an existing region encoding: new attributes first, then the
   decl's, then its type's.  */
4297 attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
4298 encoding = mep_attrlist_to_encoding (attrlist, decl);
4299 if (!encoding && TYPE_P (TREE_TYPE (decl)))
4301 attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
4302 encoding = mep_attrlist_to_encoding (attrlist, decl);
4304 if (encoding)
4306 /* This means that the declaration has a specific section
4307 attribute, so we should not apply the default rules. */
4309 if (encoding == 'i' || encoding == 'I')
4311 tree attr = lookup_attribute ("io", attrlist);
4312 if (attr
4313 && TREE_VALUE (attr)
4314 && TREE_VALUE (TREE_VALUE(attr)))
4316 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4317 static tree previous_value = 0;
4318 static int previous_location = 0;
4319 static tree previous_name = 0;
4321 /* We take advantage of the fact that gcc will reuse the
4322 same tree pointer when applying an attribute to a
4323 list of decls, but produce a new tree for attributes
4324 on separate source lines, even when they're textually
4325 identical. This is the behavior we want. */
4326 if (TREE_VALUE (attr) == previous_value
4327 && location == previous_location)
4329 warning(0, "__io address 0x%x is the same for %qE and %qE",
4330 location, previous_name, DECL_NAME (decl));
4332 previous_name = DECL_NAME (decl);
4333 previous_location = location;
4334 previous_value = TREE_VALUE (attr);
4337 return;
4341 /* Declarations of arrays can change size. Don't trust them. */
4342 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
4343 size = 0;
4344 else
4345 size = int_size_in_bytes (TREE_TYPE (decl));
/* -mrand-tpgp: scatter small data across sections pseudo-randomly
   (keyed on the name) to exercise the tpgp relaxation.  */
4347 if (TARGET_RAND_TPGP && size <= 4 && size > 0)
4349 if (TREE_PUBLIC (decl)
4350 || DECL_EXTERNAL (decl)
4351 || TREE_STATIC (decl))
4353 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
4354 int key = 0;
4356 while (*name)
4357 key += *name++;
4359 switch (key & 3)
4361 case 0:
4362 secname = "based";
4363 break;
4364 case 1:
4365 secname = "tiny";
4366 break;
4367 case 2:
4368 secname = "far";
4369 break;
4370 default:
4375 else
/* Normal path: choose by the -mbased/-mtiny size cutoffs, falling
   back to far when -ml is in effect.  */
4377 if (size <= mep_based_cutoff && size > 0)
4378 secname = "based";
4379 else if (size <= mep_tiny_cutoff && size > 0)
4380 secname = "tiny";
4381 else if (TARGET_L)
4382 secname = "far";
/* -mconst-section overrides the choice for read-only data.  */
4385 if (mep_const_section && TREE_READONLY (decl))
4387 if (strcmp (mep_const_section, "tiny") == 0)
4388 secname = "tiny";
4389 else if (strcmp (mep_const_section, "near") == 0)
4390 return;
4391 else if (strcmp (mep_const_section, "far") == 0)
4392 secname = "far";
4395 if (!secname)
4396 return;
4398 if (!mep_multiple_address_regions (*attributes, true)
4399 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
4401 attrib = build_tree_list (get_identifier (secname), NULL_TREE);
4403 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4404 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4405 and mep_validate_based_tiny. */
4406 DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
/* Implement TARGET_ENCODE_SECTION_INFO.  Prefix the assembler name of
   a decl with "@<code>." reflecting its address-region encoding, and
   warn when the object is too large for its target section.  */
4410 static void
4411 mep_encode_section_info (tree decl, rtx rtl, int first)
4413 rtx rtlname;
4414 const char *oldname;
4415 const char *secname;
4416 char encoding;
4417 char *newname;
4418 tree idp;
4419 int maxsize;
4420 tree type;
4421 tree mep_attributes;
4423 if (! first)
4424 return;
4426 if (TREE_CODE (decl) != VAR_DECL
4427 && TREE_CODE (decl) != FUNCTION_DECL)
4428 return;
/* Fetch the current assembler name from the decl's RTL.  */
4430 rtlname = XEXP (rtl, 0);
4431 if (GET_CODE (rtlname) == SYMBOL_REF)
4432 oldname = XSTR (rtlname, 0);
4433 else if (GET_CODE (rtlname) == MEM
4434 && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
4435 oldname = XSTR (XEXP (rtlname, 0), 0);
4436 else
4437 gcc_unreachable ();
4439 type = TREE_TYPE (decl);
4440 if (type == error_mark_node)
4441 return;
4442 mep_attributes = MEP_ATTRIBUTES (decl);
4444 encoding = mep_attrlist_to_encoding (mep_attributes, decl);
4446 if (encoding)
/* Rewrite the symbol as "@<code>.<oldname>" and preserve its
   weakness and decl association.  */
4448 newname = (char *) alloca (strlen (oldname) + 4);
4449 sprintf (newname, "@%c.%s", encoding, oldname);
4450 idp = get_identifier (newname);
4451 XEXP (rtl, 0) =
4452 gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
4453 SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
4454 SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);
/* Capacity of the addressing mode behind each small-data section.  */
4456 switch (encoding)
4458 case 'b':
4459 maxsize = 128;
4460 secname = "based";
4461 break;
4462 case 't':
4463 maxsize = 65536;
4464 secname = "tiny";
4465 break;
4466 case 'n':
4467 maxsize = 0x1000000;
4468 secname = "near";
4469 break;
4470 default:
4471 maxsize = 0;
4472 secname = 0;
4473 break;
4475 if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
4477 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4478 oldname,
4479 (long) int_size_in_bytes (TREE_TYPE (decl)),
4480 secname,
4481 maxsize);
/* Implement TARGET_STRIP_NAME_ENCODING.  Remove leading '*' markers
   and "@<code>." prefixes (added by mep_encode_section_info) from SYM
   and return the bare name.  The sym[1] check guards against reading
   past the terminating NUL when the string ends with a bare '@'.  */

const char *
mep_strip_name_encoding (const char *sym)
{
  while (1)
    {
      if (sym[0] == '*')
	sym++;
      else if (sym[0] == '@' && sym[1] != '\0' && sym[2] == '.')
	sym += 3;
      else
	return sym;
    }
}
/* Implement TARGET_ASM_SELECT_SECTION.  Choose an output section for
   DECL based on its "@<code>." name encoding and read-only-ness;
   io/cb variables must not carry initializers.  */
4500 static section *
4501 mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
4502 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
4504 int readonly = 1;
4505 int encoding;
/* Decide whether the object can live in a read-only section.  */
4507 switch (TREE_CODE (decl))
4509 case VAR_DECL:
4510 if (!TREE_READONLY (decl)
4511 || TREE_SIDE_EFFECTS (decl)
4512 || !DECL_INITIAL (decl)
4513 || (DECL_INITIAL (decl) != error_mark_node
4514 && !TREE_CONSTANT (DECL_INITIAL (decl))))
4515 readonly = 0;
4516 break;
4517 case CONSTRUCTOR:
4518 if (! TREE_CONSTANT (decl))
4519 readonly = 0;
4520 break;
4522 default:
4523 break;
/* Functions: pick among text/ftext/vtext/vftext (far x vliw).  */
4526 if (TREE_CODE (decl) == FUNCTION_DECL)
4528 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4530 if (name[0] == '@' && name[2] == '.')
4531 encoding = name[1];
4532 else
4533 encoding = 0;
4535 if (flag_function_sections || DECL_COMDAT_GROUP (decl))
4536 mep_unique_section (decl, 0);
4537 else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4539 if (encoding == 'f')
4540 return vftext_section;
4541 else
4542 return vtext_section;
4544 else if (encoding == 'f')
4545 return ftext_section;
4546 else
4547 return text_section;
/* Variables: dispatch on the encoding character.  */
4550 if (TREE_CODE (decl) == VAR_DECL)
4552 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4554 if (name[0] == '@' && name[2] == '.')
4555 switch (name[1])
4557 case 'b':
4558 return based_section;
4560 case 't':
4561 if (readonly)
4562 return srodata_section;
4563 if (DECL_INITIAL (decl))
4564 return sdata_section;
4565 return tinybss_section;
4567 case 'f':
4568 if (readonly)
4569 return frodata_section;
4570 return far_section;
4572 case 'i':
4573 case 'I':
4574 error_at (DECL_SOURCE_LOCATION (decl),
4575 "variable %D of type %<io%> must be uninitialized", decl);
4576 return data_section;
4578 case 'c':
4579 error_at (DECL_SOURCE_LOCATION (decl),
4580 "variable %D of type %<cb%> must be uninitialized", decl);
4581 return data_section;
4585 if (readonly)
4586 return readonly_data_section;
4588 return data_section;
/* Implement TARGET_ASM_UNIQUE_SECTION.  Build a per-decl section name
   "<prefix><symbol>", choosing the prefix (and its .gnu.linkonce
   variant for COMDAT) from the decl's kind, read-only-ness, and
   "@<code>." name encoding.  */
4591 static void
4592 mep_unique_section (tree decl, int reloc)
4594 static const char *prefixes[][2] =
4596 { ".text.", ".gnu.linkonce.t." },
4597 { ".rodata.", ".gnu.linkonce.r." },
4598 { ".data.", ".gnu.linkonce.d." },
4599 { ".based.", ".gnu.linkonce.based." },
4600 { ".sdata.", ".gnu.linkonce.s." },
4601 { ".far.", ".gnu.linkonce.far." },
4602 { ".ftext.", ".gnu.linkonce.ft." },
4603 { ".frodata.", ".gnu.linkonce.frd." },
4604 { ".srodata.", ".gnu.linkonce.srd." },
4605 { ".vtext.", ".gnu.linkonce.v." },
4606 { ".vftext.", ".gnu.linkonce.vf." }
4608 int sec = 2; /* .data */
4609 int len;
4610 const char *name, *prefix;
4611 char *string;
4613 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
4614 if (DECL_RTL (decl))
4615 name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
/* Base choice: vtext/text for functions, rodata for const data.  */
4617 if (TREE_CODE (decl) == FUNCTION_DECL)
4619 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4620 sec = 9; /* .vtext */
4621 else
4622 sec = 0; /* .text */
4624 else if (decl_readonly_section (decl, reloc))
4625 sec = 1; /* .rodata */
/* Refine by the "@<code>." encoding, then strip it from the name.  */
4627 if (name[0] == '@' && name[2] == '.')
4629 switch (name[1])
4631 case 'b':
4632 sec = 3; /* .based */
4633 break;
4634 case 't':
4635 if (sec == 1)
4636 sec = 8; /* .srodata */
4637 else
4638 sec = 4; /* .sdata */
4639 break;
4640 case 'f':
4641 if (sec == 0)
4642 sec = 6; /* .ftext */
4643 else if (sec == 9)
4644 sec = 10; /* .vftext */
4645 else if (sec == 1)
4646 sec = 7; /* .frodata */
4647 else
4648 sec = 5; /* .far. */
4649 break;
4651 name += 3;
4654 prefix = prefixes[sec][DECL_COMDAT_GROUP(decl) != NULL];
4655 len = strlen (name) + strlen (prefix);
4656 string = (char *) alloca (len + 1);
4658 sprintf (string, "%s%s", prefix, name);
4660 DECL_SECTION_NAME (decl) = build_string (len, string);
4663 /* Given a decl, a section name, and whether the decl initializer
4664 has relocs, choose attributes for the section. */
/* Machine-dependent section flag marking a VLIW code section.  */
4666 #define SECTION_MEP_VLIW SECTION_MACH_DEP
/* Implement TARGET_SECTION_TYPE_FLAGS: add SECTION_MEP_VLIW for
   sections holding VLIW functions.  */
4668 static unsigned int
4669 mep_section_type_flags (tree decl, const char *name, int reloc)
4671 unsigned int flags = default_section_type_flags (decl, name, reloc);
4673 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4674 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4675 flags |= SECTION_MEP_VLIW;
4677 return flags;
4680 /* Switch to an arbitrary section NAME with attributes as specified
4681 by FLAGS. ALIGN specifies any known alignment requirements for
4682 the section; 0 if the default should be used.
4684 Differs from the standard ELF version only in support of VLIW mode. */
4686 static void
4687 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
/* At most 5 flag chars plus NUL fit comfortably in flagchars[8].  */
4689 char flagchars[8], *f = flagchars;
4690 const char *type;
4692 if (!(flags & SECTION_DEBUG))
4693 *f++ = 'a';
4694 if (flags & SECTION_WRITE)
4695 *f++ = 'w';
4696 if (flags & SECTION_CODE)
4697 *f++ = 'x';
4698 if (flags & SECTION_SMALL)
4699 *f++ = 's';
4700 if (flags & SECTION_MEP_VLIW)
4701 *f++ = 'v';
4702 *f = '\0';
4704 if (flags & SECTION_BSS)
4705 type = "nobits";
4706 else
4707 type = "progbits";
4709 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4710 name, flagchars, type);
/* Tell the assembler which instruction set the section holds.  */
4712 if (flags & SECTION_CODE)
4713 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4714 asm_out_file);
/* Output a common (uninitialized, possibly global) variable.  io/cb
   variables become absolute symbols at their fixed address; encoded
   based/tiny/far variables are emitted as zero-filled objects in
   their own bss-like sections; everything else uses .comm/.local.  */
4717 void
4718 mep_output_aligned_common (FILE *stream, tree decl, const char *name,
4719 int size, int align, int global)
4721 /* We intentionally don't use mep_section_tag() here. */
4722 if (name[0] == '@'
4723 && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
4724 && name[2] == '.')
/* io/cb: define the symbol as the literal address from the
   attribute argument; no storage is allocated.  */
4726 int location = -1;
4727 tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
4728 DECL_ATTRIBUTES (decl));
4729 if (attr
4730 && TREE_VALUE (attr)
4731 && TREE_VALUE (TREE_VALUE(attr)))
4732 location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4733 if (location == -1)
4734 return;
4735 if (global)
4737 fprintf (stream, "\t.globl\t");
4738 assemble_name (stream, name);
4739 fprintf (stream, "\n");
4741 assemble_name (stream, name);
4742 fprintf (stream, " = %d\n", location);
4743 return;
4745 if (name[0] == '@' && name[2] == '.')
4747 const char *sec = 0;
4748 switch (name[1])
4750 case 'b':
4751 switch_to_section (based_section);
4752 sec = ".based";
4753 break;
4754 case 't':
4755 switch_to_section (tinybss_section);
4756 sec = ".sbss";
4757 break;
4758 case 'f':
4759 switch_to_section (farbss_section);
4760 sec = ".farbss";
4761 break;
4763 if (sec)
4765 const char *name2;
4766 int p2align = 0;
/* Convert the alignment in bits to a power-of-two byte count.  */
4768 while (align > BITS_PER_UNIT)
4770 align /= 2;
4771 p2align ++;
4773 name2 = targetm.strip_name_encoding (name);
4774 if (global)
4775 fprintf (stream, "\t.globl\t%s\n", name2);
4776 fprintf (stream, "\t.p2align %d\n", p2align);
4777 fprintf (stream, "\t.type\t%s,@object\n", name2);
4778 fprintf (stream, "\t.size\t%s,%d\n", name2, size);
4779 fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
4780 return;
/* Default: classic .local/.comm emission.  */
4784 if (!global)
4786 fprintf (stream, "\t.local\t");
4787 assemble_name (stream, name);
4788 fprintf (stream, "\n");
4790 fprintf (stream, "\t.comm\t");
4791 assemble_name (stream, name);
4792 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
4795 /* Trampolines. */
/* Implement TARGET_TRAMPOLINE_INIT by delegating the fill-in of the
   trampoline at ADDR to a runtime helper, passing the target function
   address and the static chain value.  */
4797 static void
4798 mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
4800 rtx addr = XEXP (m_tramp, 0);
4801 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
4803 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4804 LCT_NORMAL, VOIDmode, 3,
4805 addr, Pmode,
4806 fnaddr, Pmode,
4807 static_chain, Pmode);
4810 /* Experimental Reorg. */
/* Recursively test whether IN mentions REG (or, when REG is NULL, any
   MEM or label reference).  When MODES_TOO is nonzero a register only
   matches if its mode matches as well.  */
4812 static bool
4813 mep_mentioned_p (rtx in,
4814 rtx reg, /* NULL for mem */
4815 int modes_too) /* if nonzero, modes must match also. */
4817 const char *fmt;
4818 int i;
4819 enum rtx_code code;
4821 if (in == 0)
4822 return false;
4823 if (reg && GET_CODE (reg) != REG)
4824 return false;
4826 if (GET_CODE (in) == LABEL_REF)
4827 return (reg == 0);
4829 code = GET_CODE (in);
4831 switch (code)
4833 case MEM:
/* In reg mode, look for the register inside the address; in mem
   mode, any MEM counts.  */
4834 if (reg)
4835 return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
4836 return true;
4838 case REG:
4839 if (!reg)
4840 return false;
4841 if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
4842 return false;
4843 return (REGNO (in) == REGNO (reg));
/* Leaf codes that can mention neither a register nor memory.  */
4845 case SCRATCH:
4846 case CC0:
4847 case PC:
4848 case CONST_INT:
4849 case CONST_DOUBLE:
4850 return false;
4852 default:
4853 break;
4856 /* Set's source should be read-only. */
4857 if (code == SET && !reg)
4858 return mep_mentioned_p (SET_DEST (in), reg, modes_too);
/* Generic walk over the rtx's operands and operand vectors.  */
4860 fmt = GET_RTX_FORMAT (code);
4862 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4864 if (fmt[i] == 'E')
4866 register int j;
4867 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
4868 if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
4869 return true;
4871 else if (fmt[i] == 'e'
4872 && mep_mentioned_p (XEXP (in, i), reg, modes_too))
4873 return true;
4875 return false;
4878 #define EXPERIMENTAL_REGMOVE_REORG 1
4880 #if EXPERIMENTAL_REGMOVE_REORG
/* Return 1 if hard registers R1 and R2 live in the same register bank
   for the purposes of the regmove reorg: both general-purpose
   registers, or both coprocessor registers.  Return 0 otherwise.  */

static int
mep_compatible_reg_class (int r1, int r2)
{
  return ((GR_REGNO_P (r1) && GR_REGNO_P (r2))
	  || (CR_REGNO_P (r1) && CR_REGNO_P (r2)));
}
/* Delete superfluous register-to-register moves.  See the inline
   comment below for the exact pattern matched.  Iterates to a fixed
   point: each successful deletion restarts the scan.  Relies on
   accurate REG_DEAD notes (see mep_reorg, which runs df first).  */

4892 static void
4893 mep_reorg_regmove (rtx insns)
4895 rtx insn, next, pat, follow, *where;
4896 int count = 0, done = 0, replace, before = 0;

/* Count the insns up front, purely for the dump-file statistics.  */
4898 if (dump_file)
4899 for (insn = insns; insn; insn = NEXT_INSN (insn))
4900 if (NONJUMP_INSN_P (insn))
4901 before++;

4903 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4904 set that uses the r2 and r2 dies there. We replace r2 with r1
4905 and see if it's still a valid insn. If so, delete the first set.
4906 Copied from reorg.c. */

4908 while (!done)
4910 done = 1;
4911 for (insn = insns; insn; insn = next)
4913 next = next_nonnote_nondebug_insn (insn);
4914 if (! NONJUMP_INSN_P (insn))
4915 continue;
4916 pat = PATTERN (insn);

4918 replace = 0;

/* Candidate first insn: a reg-to-reg copy within one register bank
   whose source register dies here.  */
4920 if (GET_CODE (pat) == SET
4921 && GET_CODE (SET_SRC (pat)) == REG
4922 && GET_CODE (SET_DEST (pat)) == REG
4923 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4924 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4926 follow = next_nonnote_nondebug_insn (insn);
4927 if (dump_file)
4928 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));

/* Skip forward over intervening SETs that touch neither register.  */
4930 while (follow && NONJUMP_INSN_P (follow)
4931 && GET_CODE (PATTERN (follow)) == SET
4932 && !dead_or_set_p (follow, SET_SRC (pat))
4933 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
4934 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
4936 if (dump_file)
4937 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
/* NOTE(review): next_nonnote_insn here, but next_nonnote_nondebug_insn
   above — presumably debug insns should be skipped here too; confirm.  */
4938 follow = next_nonnote_insn (follow);

4941 if (dump_file)
4942 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
/* FOLLOW must be a SET in which the copy's destination dies.  */
4943 if (follow && NONJUMP_INSN_P (follow)
4944 && GET_CODE (PATTERN (follow)) == SET
4945 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
/* Reg destination: r2 must appear (mode-exactly) in FOLLOW's source.  */
4947 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
4949 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
4951 replace = 1;
4952 where = & SET_SRC (PATTERN (follow));
/* Mem destination: r2 may appear anywhere in the pattern (address).  */
4955 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
4957 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
4959 replace = 1;
4960 where = & PATTERN (follow);

4966 /* If so, follow is the corresponding insn */
4967 if (replace)
4969 if (dump_file)
4971 rtx x;

4973 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
4974 for (x = insn; x ;x = NEXT_INSN (x))
4976 print_rtl_single (dump_file, x);
4977 if (x == follow)
4978 break;
4979 fprintf (dump_file, "\n");

/* Substitute r1 for r2 in FOLLOW; only commit if the result still
   recognizes as a valid insn.  */
4983 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
4984 follow, where))
4986 count ++;
4987 delete_insn (insn);
4988 if (dump_file)
4990 fprintf (dump_file, "\n----- Success! new insn:\n\n");
4991 print_rtl_single (dump_file, follow);
/* Restart the outer fixed-point loop after any change.  */
4993 done = 0;

4999 if (dump_file)
5001 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5002 fprintf (dump_file, "=====\n");
5005 #endif
5008 /* Figure out where to put LABEL, which is the label for a repeat loop.
5009 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5010 the loop ends just before LAST_INSN. If SHARED, insns other than the
5011 "repeat" might use LABEL to jump to the loop's continuation point.

5013 Return the last instruction in the adjusted loop. */

5015 static rtx
5016 mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
5017 bool shared)
5019 rtx next, prev;
5020 int count = 0, code, icode;

5022 if (dump_file)
5023 fprintf (dump_file, "considering end of repeat loop at insn %d\n",
5024 INSN_UID (last_insn));

5026 /* Set PREV to the last insn in the loop. */
5027 prev = last_insn;
5028 if (!including)
5029 prev = PREV_INSN (prev);

5031 /* Set NEXT to the next insn after the repeat label. */
5032 next = last_insn;
/* When the label is private to the repeat insn, try to pull up to two
   trailing insns into the repeat "epilogue" (the hardware slots after
   the label).  Scan backwards looking for up to two eligible insns.  */
5033 if (!shared)
5034 while (prev != 0)
5036 code = GET_CODE (prev);
/* Calls, labels and barriers may never sit in the epilogue slots.  */
5037 if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
5038 break;

5040 if (INSN_P (prev))
/* For a delay-slot SEQUENCE, consider its second element.  */
5042 if (GET_CODE (PATTERN (prev)) == SEQUENCE)
5043 prev = XVECEXP (PATTERN (prev), 0, 1);

5045 /* Other insns that should not be in the last two opcodes. */
5046 icode = recog_memoized (prev);
5047 if (icode < 0
5048 || icode == CODE_FOR_repeat
5049 || icode == CODE_FOR_erepeat
5050 || get_attr_may_trap (prev) == MAY_TRAP_YES)
5051 break;

5053 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5054 is the second instruction in a VLIW bundle. In that case,
5055 loop again: if the first instruction also satisfies the
5056 conditions above then we will reach here again and put
5057 both of them into the repeat epilogue. Otherwise both
5058 should remain outside. */
5059 if (GET_MODE (prev) != BImode)
5061 count++;
5062 next = prev;
5063 if (dump_file)
5064 print_rtl_single (dump_file, next);
/* The repeat epilogue holds at most two insns.  */
5065 if (count == 2)
5066 break;

5069 prev = PREV_INSN (prev);

5072 /* See if we're adding the label immediately after the repeat insn.
5073 If so, we need to separate them with a nop. */
5074 prev = prev_real_insn (next);
5075 if (prev)
5076 switch (recog_memoized (prev))
5078 case CODE_FOR_repeat:
5079 case CODE_FOR_erepeat:
5080 if (dump_file)
5081 fprintf (dump_file, "Adding nop inside loop\n");
5082 emit_insn_before (gen_nop (), next);
5083 break;

5085 default:
5086 break;

5089 /* Insert the label. */
5090 emit_label_before (label, next);

5092 /* Insert the nops. */
5093 if (dump_file && count < 2)
5094 fprintf (dump_file, "Adding %d nop%s\n\n",
5095 2 - count, count == 1 ? "" : "s");
/* Pad the epilogue out to exactly two insns with nops.  */
5097 for (; count < 2; count++)
5098 if (including)
5099 last_insn = emit_insn_after (gen_nop (), last_insn);
5100 else
5101 emit_insn_before (gen_nop (), last_insn);

5103 return last_insn;
/* Emit a doloop placeholder insn for OPERANDS: a doloop_end when
   IS_END is nonzero, otherwise a doloop_begin.  A fresh tag number is
   allocated whenever there are no tags yet or the previous tag was
   created from the same kind of insn; otherwise the begin/end pair
   shares the current tag.  mep_reorg_repeat later groups the
   placeholders by tag and rewrites them.  */

5107 void
5108 mep_emit_doloop (rtx *operands, int is_end)
5110 rtx tag;

5112 if (cfun->machine->doloop_tags == 0
5113 || cfun->machine->doloop_tag_from_end == is_end)
5115 cfun->machine->doloop_tags++;
5116 cfun->machine->doloop_tag_from_end = is_end;

5119 tag = GEN_INT (cfun->machine->doloop_tags - 1);
5120 if (is_end)
5121 emit_jump_insn (gen_doloop_end_internal (operands[0], operands[1], tag));
5122 else
/* NOTE(review): operands[0] is passed twice — presumably the pattern's
   destination and source are the same counter register; confirm
   against doloop_begin_internal in mep.md.  */
5123 emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
5127 /* Code for converting doloop_begins and doloop_ends into valid
5128 MeP instructions. A doloop_begin is just a placeholder:
5130 $count = unspec ($count)
5132 where $count is initially the number of iterations - 1.
5133 doloop_end has the form:
5135 if ($count-- == 0) goto label
5137 The counter variable is private to the doloop insns, nothing else
5138 relies on its value.
5140 There are three cases, in decreasing order of preference:
5142 1. A loop has exactly one doloop_begin and one doloop_end.
5143 The doloop_end branches to the first instruction after
5144 the doloop_begin.
5146 In this case we can replace the doloop_begin with a repeat
5147 instruction and remove the doloop_end. I.e.:
5149 $count1 = unspec ($count1)
5150 label:
5152 insn1
5153 insn2
5154 if ($count2-- == 0) goto label
5156 becomes:
5158 repeat $count1,repeat_label
5159 label:
5161 repeat_label:
5162 insn1
5163 insn2
5164 # end repeat
5166 2. As for (1), except there are several doloop_ends. One of them
5167 (call it X) falls through to a label L. All the others fall
5168 through to branches to L.
5170 In this case, we remove X and replace the other doloop_ends
5171 with branches to the repeat label. For example:
5173 $count1 = unspec ($count1)
5174 start:
5176 if ($count2-- == 0) goto label
5177 end:
5179 if ($count3-- == 0) goto label
5180 goto end
5182 becomes:
5184 repeat $count1,repeat_label
5185 start:
5187 repeat_label:
5190 # end repeat
5191 end:
5193 goto repeat_label
5195 3. The fallback case. Replace doloop_begins with:
5197 $count = $count + 1
5199 Replace doloop_ends with the equivalent of:
5201 $count = $count - 1
5202 if ($count == 0) goto label
5204 Note that this might need a scratch register if $count
5205 is stored in memory. */
5207 /* A structure describing one doloop_begin. */
5208 struct mep_doloop_begin {
5209 /* The next doloop_begin with the same tag. */
5210 struct mep_doloop_begin *next;

5212 /* The instruction itself. */
5213 rtx insn;

5215 /* The initial counter value. This is known to be a general register. */
5216 rtx counter;

5219 /* A structure describing a doloop_end. */
5220 struct mep_doloop_end {
5221 /* The next doloop_end with the same loop tag. */
5222 struct mep_doloop_end *next;

5224 /* The instruction itself. */
5225 rtx insn;

5227 /* The first instruction after INSN when the branch isn't taken. */
5228 rtx fallthrough;

5230 /* The location of the counter value. Since doloop_end_internal is a
5231 jump instruction, it has to allow the counter to be stored anywhere
5232 (any non-fixed register or memory location). */
5233 rtx counter;

5235 /* The target label (the place where the insn branches when the counter
5236 isn't zero). */
5237 rtx label;

5239 /* A scratch register. Only available when COUNTER isn't stored
5240 in a general register. */
5241 rtx scratch;

5245 /* One do-while loop. */
/* These per-loop records are built (alloca'd) and consumed entirely
   within mep_reorg_repeat, grouped by the doloop tag number.  */
5246 struct mep_doloop {
5247 /* All the doloop_begins for this loop (in no particular order). */
5248 struct mep_doloop_begin *begin;

5250 /* All the doloop_ends. When there is more than one, arrange things
5251 so that the first one is the most likely to be X in case (2) above. */
5252 struct mep_doloop_end *end;
5256 /* Return true if LOOP can be converted into repeat/repeat_end form
5257 (that is, if it matches cases (1) or (2) above). */

5259 static bool
5260 mep_repeat_loop_p (struct mep_doloop *loop)
5262 struct mep_doloop_end *end;
5263 rtx fallthrough;

5265 /* There must be exactly one doloop_begin and at least one doloop_end. */
5266 if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
5267 return false;

5269 /* The first doloop_end (X) must branch back to the insn after
5270 the doloop_begin. */
5271 if (prev_real_insn (loop->end->label) != loop->begin->insn)
5272 return false;

5274 /* All the other doloop_ends must branch to the same place as X.
5275 When the branch isn't taken, they must jump to the instruction
5276 after X. */
5277 fallthrough = loop->end->fallthrough;
5278 for (end = loop->end->next; end != 0; end = end->next)
5279 if (end->label != loop->end->label
5280 || !simplejump_p (end->fallthrough)
5281 || next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
5282 return false;

5284 return true;
5288 /* The main repeat reorg function. See comment above for details. */

5290 static void
5291 mep_reorg_repeat (rtx insns)
5293 rtx insn;
5294 struct mep_doloop *loops, *loop;
5295 struct mep_doloop_begin *begin;
5296 struct mep_doloop_end *end;

5298 /* Quick exit if we haven't created any loops. */
5299 if (cfun->machine->doloop_tags == 0)
5300 return;

5302 /* Create an array of mep_doloop structures. */
/* All per-loop data lives on the stack (alloca) for the duration of
   this pass; indexed by the tag allocated in mep_emit_doloop.  */
5303 loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
5304 memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

5306 /* Search the function for do-while insns and group them by loop tag. */
5307 for (insn = insns; insn; insn = NEXT_INSN (insn))
5308 if (INSN_P (insn))
5309 switch (recog_memoized (insn))
5311 case CODE_FOR_doloop_begin_internal:
/* Operand 2 is the tag; operand 0 the counter register.  */
5312 insn_extract (insn);
5313 loop = &loops[INTVAL (recog_data.operand[2])];

5315 begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
5316 begin->next = loop->begin;
5317 begin->insn = insn;
5318 begin->counter = recog_data.operand[0];

5320 loop->begin = begin;
5321 break;

5323 case CODE_FOR_doloop_end_internal:
/* Operands: 0 = counter, 1 = branch label, 2 = tag, 3 = scratch.  */
5324 insn_extract (insn);
5325 loop = &loops[INTVAL (recog_data.operand[2])];

5327 end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
5328 end->insn = insn;
5329 end->fallthrough = next_real_insn (insn);
5330 end->counter = recog_data.operand[0];
5331 end->label = recog_data.operand[1];
5332 end->scratch = recog_data.operand[3];

5334 /* If this insn falls through to an unconditional jump,
5335 give it a lower priority than the others. */
5336 if (loop->end != 0 && simplejump_p (end->fallthrough))
5338 end->next = loop->end->next;
5339 loop->end->next = end;
5341 else
5343 end->next = loop->end;
5344 loop->end = end;
5346 break;

5349 /* Convert the insns for each loop in turn. */
5350 for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
5351 if (mep_repeat_loop_p (loop))
5353 /* Case (1) or (2). */
5354 rtx repeat_label, label_ref;

5356 /* Create a new label for the repeat insn. */
5357 repeat_label = gen_label_rtx ();

5359 /* Replace the doloop_begin with a repeat. */
5360 label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
5361 emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
5362 loop->begin->insn);
5363 delete_insn (loop->begin->insn);

5365 /* Insert the repeat label before the first doloop_end.
5366 Fill the gap with nops if there are other doloop_ends. */
5367 mep_insert_repeat_label_last (loop->end->insn, repeat_label,
5368 false, loop->end->next != 0);

5370 /* Emit a repeat_end (to improve the readability of the output). */
5371 emit_insn_before (gen_repeat_end (), loop->end->insn);

5373 /* Delete the first doloop_end. */
5374 delete_insn (loop->end->insn);

5376 /* Replace the others with branches to REPEAT_LABEL. */
5377 for (end = loop->end->next; end != 0; end = end->next)
5379 emit_jump_insn_before (gen_jump (repeat_label), end->insn);
5380 delete_insn (end->insn);
5381 delete_insn (end->fallthrough);
5384 else
5386 /* Case (3). First replace all the doloop_begins with increment
5387 instructions. */
5388 for (begin = loop->begin; begin != 0; begin = begin->next)
5390 emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
5391 begin->counter, const1_rtx),
5392 begin->insn);
5393 delete_insn (begin->insn);

5396 /* Replace all the doloop_ends with decrement-and-branch sequences. */
5397 for (end = loop->end; end != 0; end = end->next)
5399 rtx reg;

5401 start_sequence ();

5403 /* Load the counter value into a general register. */
/* Counters outside $0-$15 (or in memory) go via the scratch reg.  */
5404 reg = end->counter;
5405 if (!REG_P (reg) || REGNO (reg) > 15)
5407 reg = end->scratch;
5408 emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));

5411 /* Decrement the counter. */
5412 emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
5413 constm1_rtx));

5415 /* Copy it back to its original location. */
5416 if (reg != end->counter)
5417 emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

5419 /* Jump back to the start label. */
5420 insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
5421 end->label));
5422 JUMP_LABEL (insn) = end->label;
5423 LABEL_NUSES (end->label)++;

5425 /* Emit the whole sequence before the doloop_end. */
5426 insn = get_insns ();
5427 end_sequence ();

5428 emit_insn_before (insn, end->insn);

5430 /* Delete the doloop_end. */
5431 delete_insn (end->insn);
/* Return true if INSN is a conditional branch whose condition code
   (EQ, NE, LT or GE) can be inverted and still recognize as a valid
   insn.  The test is destructive-but-restoring: the condition is
   rewritten in place, re-recognized, and then put back.  */

5437 static bool
5438 mep_invertable_branch_p (rtx insn)
5440 rtx cond, set;
5441 enum rtx_code old_code;
5442 int i;

5444 set = PATTERN (insn);
5445 if (GET_CODE (set) != SET)
5446 return false;
5447 if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
5448 return false;
5449 cond = XEXP (XEXP (set, 1), 0);
5450 old_code = GET_CODE (cond);
/* Temporarily flip the comparison to its logical inverse.  Only these
   four codes are handled; anything else is deemed non-invertible.  */
5451 switch (old_code)
5453 case EQ:
5454 PUT_CODE (cond, NE);
5455 break;
5456 case NE:
5457 PUT_CODE (cond, EQ);
5458 break;
5459 case LT:
5460 PUT_CODE (cond, GE);
5461 break;
5462 case GE:
5463 PUT_CODE (cond, LT);
5464 break;
5465 default:
5466 return false;
/* Clear the memoized code so recog re-examines the modified insn,
   then restore both the condition and the unmemoized state.  */
5468 INSN_CODE (insn) = -1;
5469 i = recog_memoized (insn);
5470 PUT_CODE (cond, old_code);
5471 INSN_CODE (insn) = -1;
5472 return i >= 0;
/* Invert the conditional branch INSN in place and retarget it to a
   fresh label emitted just after AFTER.  The caller must have checked
   mep_invertable_branch_p first; the result must recognize, which is
   asserted.  Used by mep_reorg_erepeat to turn a loop-closing branch
   into a branch past the erepeat_end.  */

5475 static void
5476 mep_invert_branch (rtx insn, rtx after)
5478 rtx cond, set, label;
5479 int i;

5481 set = PATTERN (insn);

5483 gcc_assert (GET_CODE (set) == SET);
5484 gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

/* Flip the comparison; same four codes as mep_invertable_branch_p.  */
5486 cond = XEXP (XEXP (set, 1), 0);
5487 switch (GET_CODE (cond))
5489 case EQ:
5490 PUT_CODE (cond, NE);
5491 break;
5492 case NE:
5493 PUT_CODE (cond, EQ);
5494 break;
5495 case LT:
5496 PUT_CODE (cond, GE);
5497 break;
5498 case GE:
5499 PUT_CODE (cond, LT);
5500 break;
5501 default:
5502 gcc_unreachable ();
5504 label = gen_label_rtx ();
5505 emit_label_after (label, after);
/* Redirect whichever arm of the IF_THEN_ELSE holds the label ref,
   deleting the old label if this branch was its only user.  */
5506 for (i=1; i<=2; i++)
5507 if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
5509 rtx ref = XEXP (XEXP (set, 1), i);
5510 if (LABEL_NUSES (XEXP (ref, 0)) == 1)
5511 delete_insn (XEXP (ref, 0));
5512 XEXP (ref, 0) = label;
5513 LABEL_NUSES (label) ++;
5514 JUMP_LABEL (insn) = label;
/* Re-recognize the modified insn; it must be valid.  */
5516 INSN_CODE (insn) = -1;
5517 i = recog_memoized (insn);
5518 gcc_assert (i >= 0);
/* Look for invertible conditional branches that close a small
   backwards loop and convert such loops to use the erepeat
   instruction.  The backward scan from each candidate branch counts
   the loop body insns and bails out on anything unsafe (calls,
   barriers, labels with unknown users).  */

5521 static void
5522 mep_reorg_erepeat (rtx insns)
5524 rtx insn, prev, l, x;
5525 int count;

5527 for (insn = insns; insn; insn = NEXT_INSN (insn))
5528 if (JUMP_P (insn)
5529 && mep_invertable_branch_p (insn))
5531 if (dump_file)
5533 fprintf (dump_file, "\n------------------------------\n");
5534 fprintf (dump_file, "erepeat: considering this jump:\n");
5535 print_rtl_single (dump_file, insn);
/* A conditional branch itself remains in the loop body, so it
   contributes one insn to the count; a simple jump is removed.  */
5537 count = simplejump_p (insn) ? 0 : 1;
/* Walk backwards to find the loop top (the branch's target label).  */
5538 for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
5540 if (CALL_P (prev) || BARRIER_P (prev))
5541 break;

5543 if (prev == JUMP_LABEL (insn))
5545 rtx newlast;
5546 if (dump_file)
5547 fprintf (dump_file, "found loop top, %d insns\n", count);

5549 if (LABEL_NUSES (prev) == 1)
5550 /* We're the only user, always safe */ ;
5551 else if (LABEL_NUSES (prev) == 2)
5553 /* See if there's a barrier before this label. If
5554 so, we know nobody inside the loop uses it.
5555 But we must be careful to put the erepeat
5556 *after* the label. */
5557 rtx barrier;
5558 for (barrier = PREV_INSN (prev);
5559 barrier && NOTE_P (barrier);
5560 barrier = PREV_INSN (barrier))
5562 if (barrier && ! BARRIER_P (barrier))
5563 break;
5565 else
5567 /* We don't know who else, within or without our loop, uses this */
5568 if (dump_file)
5569 fprintf (dump_file, "... but there are multiple users, too risky.\n");
5570 break;

5573 /* Generate a label to be used by the erepeat insn. */
5574 l = gen_label_rtx ();

5576 /* Insert the erepeat after INSN's target label. */
5577 x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
5578 LABEL_NUSES (l)++;
5579 emit_insn_after (x, prev);

5581 /* Insert the erepeat label. */
5582 newlast = (mep_insert_repeat_label_last
5583 (insn, l, !simplejump_p (insn), false));
5584 if (simplejump_p (insn))
/* Unconditional loop-closing jump: the erepeat hardware supplies
   the loop-back, so the jump itself can be deleted.  */
5586 emit_insn_before (gen_erepeat_end (), insn);
5587 delete_insn (insn);
5589 else
/* Conditional branch: invert it to branch past the loop instead.  */
5591 mep_invert_branch (insn, newlast);
5592 emit_insn_after (gen_erepeat_end (), newlast);
5594 break;

5597 if (LABEL_P (prev))
5599 /* A label is OK if there is exactly one user, and we
5600 can find that user before the next label. */
5601 rtx user = 0;
5602 int safe = 0;
5603 if (LABEL_NUSES (prev) == 1)
5605 for (user = PREV_INSN (prev);
5606 user && (INSN_P (user) || NOTE_P (user));
5607 user = PREV_INSN (user))
5608 if (JUMP_P (user) && JUMP_LABEL (user) == prev)
5610 safe = INSN_UID (user);
5611 break;
5614 if (!safe)
5615 break;
5616 if (dump_file)
5617 fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
5618 safe, INSN_UID (prev));
/* Count real loop-body insns during the backward walk.  */
5621 if (INSN_P (prev))
5623 count ++;
5627 if (dump_file)
5628 fprintf (dump_file, "\n==============================\n");
5631 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5632 always do this on its own. */

5634 static void
5635 mep_jmp_return_reorg (rtx insns)
5637 rtx insn, label, ret;
5638 int ret_code;

5640 for (insn = insns; insn; insn = NEXT_INSN (insn))
5641 if (simplejump_p (insn))
5643 /* Find the first real insn the jump jumps to. */
5644 label = ret = JUMP_LABEL (insn);
/* Skip past notes, labels and USEs following the target label.  */
5645 while (ret
5646 && (NOTE_P (ret)
5647 || LABEL_P (ret)
5648 || GET_CODE (PATTERN (ret)) == USE))
5649 ret = NEXT_INSN (ret);

5651 if (ret)
5653 /* Is it a return? */
5654 ret_code = recog_memoized (ret);
5655 if (ret_code == CODE_FOR_return_internal
5656 || ret_code == CODE_FOR_eh_return_internal)
5658 /* It is. Replace the jump with a return. */
/* Drop our use of the label; delete it when nobody else uses it.  */
5659 LABEL_NUSES (label) --;
5660 if (LABEL_NUSES (label) == 0)
5661 delete_insn (label);
/* Rewrite the jump's pattern in place and force re-recognition.  */
5662 PATTERN (insn) = copy_rtx (PATTERN (ret));
5663 INSN_CODE (insn) = -1;
/* Combine adjacent pairs of add-immediate insns that target the same
   register ("r = r + c1" followed immediately by "r = r + c2") into a
   single "r = r + (c1+c2)", provided the sum still fits the add3
   immediate range.  The second insn is unlinked from the insn chain
   rather than deleted through the usual API.  */

5670 static void
5671 mep_reorg_addcombine (rtx insns)
5673 rtx i, n;

5675 for (i = insns; i; i = NEXT_INSN (i))
5676 if (INSN_P (i)
5677 && INSN_CODE (i) == CODE_FOR_addsi3
5678 && GET_CODE (SET_DEST (PATTERN (i))) == REG
5679 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
5680 && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
5681 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
5683 n = NEXT_INSN (i);
5684 if (INSN_P (n)
5685 && INSN_CODE (n) == CODE_FOR_addsi3
5686 && GET_CODE (SET_DEST (PATTERN (n))) == REG
5687 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
5688 && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
5689 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
5691 int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
5692 int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
/* NOTE(review): the upper bound is "< 32767" rather than "<= 32767" —
   conservative by one; presumably intentional headroom, confirm
   against the add3 immediate range in mep.md.  */
5693 if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
5694 && ic + nc < 32767
5695 && ic + nc > -32768)
5697 XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
/* Splice N out of the doubly-linked insn chain.  */
5698 NEXT_INSN (i) = NEXT_INSN (n);
5699 if (NEXT_INSN (i))
5700 PREV_INSN (NEXT_INSN (i)) = i;
5706 /* If this insn adjusts the stack, return the adjustment, else return
5707 zero. */
5708 static int
5709 add_sp_insn_p (rtx insn)
5711 rtx pat;
5713 if (! single_set (insn))
5714 return 0;
5715 pat = PATTERN (insn);
5716 if (GET_CODE (SET_DEST (pat)) != REG)
5717 return 0;
5718 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5719 return 0;
5720 if (GET_CODE (SET_SRC (pat)) != PLUS)
5721 return 0;
5722 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5723 return 0;
5724 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5725 return 0;
5726 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5727 return 0;
5728 return INTVAL (XEXP (SET_SRC (pat), 1));
5731 /* Check for trivial functions that set up an unneeded stack
5732 frame. */
/* If the function consists of "$sp = $sp + N", a body that never
   mentions $sp or memory and contains no calls, and a matching
   "$sp = $sp - N", delete both stack adjustments.  */
5733 static void
5734 mep_reorg_noframe (rtx insns)
5736 rtx start_frame_insn;
5737 rtx end_frame_insn = 0;
5738 int sp_adjust, sp2;
5739 rtx sp;

5741 /* The first insn should be $sp = $sp + N */
5742 while (insns && ! INSN_P (insns))
5743 insns = NEXT_INSN (insns);
5744 if (!insns)
5745 return;

5747 sp_adjust = add_sp_insn_p (insns);
5748 if (sp_adjust == 0)
5749 return;

5751 start_frame_insn = insns;
5752 sp = SET_DEST (PATTERN (start_frame_insn));

5754 insns = next_real_insn (insns);

/* Scan the body.  NEXT is looked up first so the final (return)
   insn is never examined; any second $sp adjustment, any other
   mention of $sp (incl. memory addressing via sp), or any call
   disqualifies the function.  */
5756 while (insns)
5758 rtx next = next_real_insn (insns);
5759 if (!next)
5760 break;

5762 sp2 = add_sp_insn_p (insns);
5763 if (sp2)
5765 if (end_frame_insn)
5766 return;
5767 end_frame_insn = insns;
/* The epilogue adjustment must exactly undo the prologue one.  */
5768 if (sp2 != -sp_adjust)
5769 return;
5771 else if (mep_mentioned_p (insns, sp, 0))
5772 return;
5773 else if (CALL_P (insns))
5774 return;

5776 insns = next;

5779 if (end_frame_insn)
5781 delete_insn (start_frame_insn);
5782 delete_insn (end_frame_insn);
/* The machine-dependent reorg pass driver for MeP: runs the local
   reorg sub-passes above in a fixed order over the whole insn
   chain.  */
5786 static void
5787 mep_reorg (void)
5789 rtx insns = get_insns ();

5791 /* We require accurate REG_DEAD notes. */
5792 compute_bb_for_insn ();
5793 df_note_add_problem ();
5794 df_analyze ();

5796 mep_reorg_addcombine (insns);
5797 #if EXPERIMENTAL_REGMOVE_REORG
5798 /* VLIW packing has been done already, so we can't just delete things. */
5799 if (!mep_vliw_function_p (cfun->decl))
5800 mep_reorg_regmove (insns);
5801 #endif
5802 mep_jmp_return_reorg (insns);
5803 mep_bundle_insns (insns);
5804 mep_reorg_repeat (insns);
/* erepeat clobbers the RPB/RPE/RPC registers, so it is unsafe in an
   interrupt handler unless those are saved; it is also skipped when
   profiling, since the profiling calls would sit inside the loop.  */
5805 if (optimize
5806 && !profile_flag
5807 && !profile_arc_flag
5808 && TARGET_OPT_REPEAT
5809 && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
5810 mep_reorg_erepeat (insns);

5812 /* This may delete *insns so make sure it's last. */
5813 mep_reorg_noframe (insns);

5815 df_finish_pass (false);
5820 /*----------------------------------------------------------------------*/
5821 /* Builtins */
5822 /*----------------------------------------------------------------------*/

5824 /* Element X gives the index into cgen_insns[] of the most general
5825 implementation of intrinsic X. Unimplemented intrinsics are
5826 mapped to -1. */
5827 int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

5829 /* Element X gives the index of another instruction that is mapped to
5830 the same intrinsic as cgen_insns[X]. It is -1 when there is no other
5831 instruction.

5833 Things are set up so that mep_intrinsic_chain[X] < X. */
5834 static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

5836 /* The bitmask for the current ISA. The ISA masks are declared
5837 in mep-intrin.h. */
5838 unsigned int mep_selected_isa;

/* One entry per selectable coprocessor configuration: a name (as
   given by -mconfig=) and its ISA bitmask.  */
5840 struct mep_config {
5841 const char *config_name;
5842 unsigned int isa;

5845 static struct mep_config mep_configs[] = {
5846 #ifdef COPROC_SELECTION_TABLE
5847 COPROC_SELECTION_TABLE,
5848 #endif
/* Null sentinel terminates the name-lookup loop in mep_init_intrinsics.  */
5849 { 0, 0 }
5852 /* Initialize the global intrinsics variables above. */

5854 static void
5855 mep_init_intrinsics (void)
5857 size_t i;

5859 /* Set MEP_SELECTED_ISA to the ISA flag for this configuration. */
/* Default to the first table entry; override if -mconfig names one.  */
5860 mep_selected_isa = mep_configs[0].isa;
5861 if (mep_config_string != 0)
5862 for (i = 0; mep_configs[i].config_name; i++)
5863 if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
5865 mep_selected_isa = mep_configs[i].isa;
5866 break;

5869 /* Assume all intrinsics are unavailable. */
5870 for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
5871 mep_intrinsic_insn[i] = -1;

5873 /* Build up the global intrinsic tables. */
/* Later entries for the same intrinsic push earlier ones onto the
   chain, which keeps mep_intrinsic_chain[X] < X as documented.  */
5874 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
5875 if ((cgen_insns[i].isas & mep_selected_isa) != 0)
5877 mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
5878 mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;

5880 /* See whether we can directly move values between one coprocessor
5881 register and another. */
5882 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
5883 if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
5884 mep_have_copro_copro_moves_p = true;

5886 /* See whether we can directly move values between core and
5887 coprocessor registers. */
5888 mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
5889 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));

/* NOTE(review): this unconditionally overwrites the value just
   computed above, making that computation a dead store.  Presumably a
   deliberate "force on" override left in place — confirm intent, and
   either remove the computation or condition this assignment.  */
5891 mep_have_core_copro_moves_p = 1;
5894 /* Declare all available intrinsic functions. Called once only. */

/* Cached tree nodes for the coprocessor builtin types; created in
   mep_init_builtins and consumed by mep_cgen_regnum_to_type.  */
5896 static tree cp_data_bus_int_type_node;
5897 static tree opaque_vector_type_node;
5898 static tree v8qi_type_node;
5899 static tree v4hi_type_node;
5900 static tree v2si_type_node;
5901 static tree v8uqi_type_node;
5902 static tree v4uhi_type_node;
5903 static tree v2usi_type_node;
/* Map a cgen regnum operand type CR onto the tree type node used to
   declare the corresponding builtin's parameter or return value.
   Unknown operand types fall back to void.  */
5905 static tree
5906 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
5908 switch (cr)
5910 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
5911 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
5912 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
5913 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
5914 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
5915 case cgen_regnum_operand_type_CHAR: return char_type_node;
5916 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
5917 case cgen_regnum_operand_type_SI: return intSI_type_node;
5918 case cgen_regnum_operand_type_DI: return intDI_type_node;
5919 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
5920 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
5921 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
5922 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
5923 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
5924 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
5925 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
5926 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
5927 default:
5928 return void_type_node;
/* Create the coprocessor builtin types and declare one builtin
   function per available non-GROUP_KNOWN_CODE intrinsic.  Presumably
   the TARGET_INIT_BUILTINS hook — confirm against the hook table.  */
5932 static void
5933 mep_init_builtins (void)
5935 size_t i;

/* The coprocessor data bus is 64 bits wide with TARGET_64BIT_CR_REGS,
   32 bits otherwise.  */
5937 if (TARGET_64BIT_CR_REGS)
5938 cp_data_bus_int_type_node = long_long_integer_type_node;
5939 else
5940 cp_data_bus_int_type_node = long_integer_type_node;

5942 opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
5943 v8qi_type_node = build_vector_type (intQI_type_node, 8);
5944 v4hi_type_node = build_vector_type (intHI_type_node, 4);
5945 v2si_type_node = build_vector_type (intSI_type_node, 2);
5946 v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
5947 v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
5948 v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);

5950 add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node);

5952 add_builtin_type ("cp_vector", opaque_vector_type_node);

5954 add_builtin_type ("cp_v8qi", v8qi_type_node);
5955 add_builtin_type ("cp_v4hi", v4hi_type_node);
5956 add_builtin_type ("cp_v2si", v2si_type_node);

5958 add_builtin_type ("cp_v8uqi", v8uqi_type_node);
5959 add_builtin_type ("cp_v4uhi", v4uhi_type_node);
5960 add_builtin_type ("cp_v2usi", v2usi_type_node);

5962 /* Intrinsics like mep_cadd3 are implemented with two groups of
5963 instructions, one which uses UNSPECs and one which uses a specific
5964 rtl code such as PLUS. Instructions in the latter group belong
5965 to GROUP_KNOWN_CODE.

5967 In such cases, the intrinsic will have two entries in the global
5968 tables above. The unspec form is accessed using builtin functions
5969 while the specific form is accessed using the mep_* enum in
5970 mep-intrin.h.

5972 The idea is that __cop arithmetic and builtin functions have
5973 different optimization requirements. If mep_cadd3() appears in
5974 the source code, the user will surely expect gcc to use cadd3
5975 rather than a work-alike such as add3. However, if the user
5976 just writes "a + b", where a or b are __cop variables, it is
5977 reasonable for gcc to choose a core instruction rather than
5978 cadd3 if it believes that is more optimal. */
5979 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
5980 if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
5981 && mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
5983 tree ret_type = void_type_node;
5984 tree bi_type;

/* Declare each intrinsic only once, even if it has several insns.  */
5986 if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
5987 continue;

5989 if (cgen_insns[i].cret_p)
5990 ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);

5992 bi_type = build_function_type_list (ret_type, NULL_TREE);
5993 add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
5994 bi_type,
5995 cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
5999 /* Report the unavailability of the given intrinsic. */

/* Emits exactly one error per intrinsic per compilation, choosing a
   message that explains whether the intrinsic is missing from this
   configuration entirely, or only unavailable in the current
   (VLIW vs. non-VLIW) function context.  */
6001 #if 1
6002 static void
6003 mep_intrinsic_unavailable (int intrinsic)
/* One flag per intrinsic, so repeated uses produce a single error.  */
6005 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];

6007 if (already_reported_p[intrinsic])
6008 return;

6010 if (mep_intrinsic_insn[intrinsic] < 0)
6011 error ("coprocessor intrinsic %qs is not available in this configuration",
6012 cgen_intrinsics[intrinsic]);
6013 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6014 error ("%qs is not available in VLIW functions",
6015 cgen_intrinsics[intrinsic]);
6016 else
6017 error ("%qs is not available in non-VLIW functions",
6018 cgen_intrinsics[intrinsic]);

6020 already_reported_p[intrinsic] = 1;
6022 #endif
6025 /* See if any implementation of INTRINSIC is available to the
6026 current function. If so, store the most general implementation
6027 in *INSN_PTR and return true. Return false otherwise. */
6029 static bool
6030 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6032 int i;
6034 i = mep_intrinsic_insn[intrinsic];
6035 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6036 i = mep_intrinsic_chain[i];
6038 if (i >= 0)
6040 *insn_ptr = &cgen_insns[i];
6041 return true;
6043 return false;
6047 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6048 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6049 try using a work-alike instead. In this case, the returned insn
6050 may have three operands rather than two. */
6052 static bool
6053 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6055 size_t i;
6057 if (intrinsic == mep_cmov)
6059 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6060 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6061 return true;
6062 return false;
6064 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6068 /* If ARG is a register operand that is the same size as MODE, convert it
6069 to MODE using a subreg. Otherwise return ARG as-is. */
6071 static rtx
6072 mep_convert_arg (enum machine_mode mode, rtx arg)
6074 if (GET_MODE (arg) != mode
6075 && register_operand (arg, VOIDmode)
6076 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6077 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6078 return arg;
6082 /* Apply regnum conversions to ARG using the description given by REGNUM.
6083 Return the new argument on success and null on failure. */
6085 static rtx
6086 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6088 if (regnum->count == 0)
6089 return arg;
6091 if (GET_CODE (arg) != CONST_INT
6092 || INTVAL (arg) < 0
6093 || INTVAL (arg) >= regnum->count)
6094 return 0;
6096 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6100 /* Try to make intrinsic argument ARG match the given operand.
6101 UNSIGNED_P is true if the argument has an unsigned type. */
6103 static rtx
6104 mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
6105 int unsigned_p)
6107 if (GET_CODE (arg) == CONST_INT)
6109 /* CONST_INTs can only be bound to integer operands. */
6110 if (GET_MODE_CLASS (operand->mode) != MODE_INT)
6111 return 0;
6113 else if (GET_CODE (arg) == CONST_DOUBLE)
6114 /* These hold vector constants. */;
6115 else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
6117 /* If the argument is a different size from what's expected, we must
6118 have a value in the right mode class in order to convert it. */
6119 if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
6120 return 0;
6122 /* If the operand is an rvalue, promote or demote it to match the
6123 operand's size. This might not need extra instructions when
6124 ARG is a register value. */
6125 if (operand->constraint[0] != '=')
6126 arg = convert_to_mode (operand->mode, arg, unsigned_p);
6129 /* If the operand is an lvalue, bind the operand to a new register.
6130 The caller will copy this value into ARG after the main
6131 instruction. By doing this always, we produce slightly more
6132 optimal code. */
6133 /* But not for control registers. */
6134 if (operand->constraint[0] == '='
6135 && (! REG_P (arg)
6136 || ! (CONTROL_REGNO_P (REGNO (arg))
6137 || CCR_REGNO_P (REGNO (arg))
6138 || CR_REGNO_P (REGNO (arg)))
6140 return gen_reg_rtx (operand->mode);
6142 /* Try simple mode punning. */
6143 arg = mep_convert_arg (operand->mode, arg);
6144 if (operand->predicate (arg, operand->mode))
6145 return arg;
6147 /* See if forcing the argument into a register will make it match. */
6148 if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
6149 arg = force_reg (operand->mode, arg);
6150 else
6151 arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
6152 if (operand->predicate (arg, operand->mode))
6153 return arg;
6155 return 0;
6159 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6160 function FNNAME. OPERAND describes the operand to which ARGNUM
6161 is mapped. */
6163 static void
6164 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6165 int argnum, tree fnname)
6167 size_t i;
6169 if (GET_CODE (arg) == CONST_INT)
6170 for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6171 if (operand->predicate == cgen_immediate_predicates[i].predicate)
6173 const struct cgen_immediate_predicate *predicate;
6174 HOST_WIDE_INT argval;
6176 predicate = &cgen_immediate_predicates[i];
6177 argval = INTVAL (arg);
6178 if (argval < predicate->lower || argval >= predicate->upper)
6179 error ("argument %d of %qE must be in the range %d...%d",
6180 argnum, fnname, predicate->lower, predicate->upper - 1);
6181 else
6182 error ("argument %d of %qE must be a multiple of %d",
6183 argnum, fnname, predicate->align);
6184 return;
6187 error ("incompatible type for argument %d of %qE", argnum, fnname);
6190 static rtx
6191 mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6192 rtx subtarget ATTRIBUTE_UNUSED,
6193 enum machine_mode mode ATTRIBUTE_UNUSED,
6194 int ignore ATTRIBUTE_UNUSED)
6196 rtx pat, op[10], arg[10];
6197 unsigned int a;
6198 int opindex, unsigned_p[10];
6199 tree fndecl, args;
6200 unsigned int n_args;
6201 tree fnname;
6202 const struct cgen_insn *cgen_insn;
6203 const struct insn_data_d *idata;
6204 unsigned int first_arg = 0;
6205 unsigned int builtin_n_args;
6207 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6208 fnname = DECL_NAME (fndecl);
6210 /* Find out which instruction we should emit. Note that some coprocessor
6211 intrinsics may only be available in VLIW mode, or only in normal mode. */
6212 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6214 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
6215 return NULL_RTX;
6217 idata = &insn_data[cgen_insn->icode];
6219 builtin_n_args = cgen_insn->num_args;
6221 if (cgen_insn->cret_p)
6223 if (cgen_insn->cret_p > 1)
6224 builtin_n_args ++;
6225 first_arg = 1;
6226 mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
6227 builtin_n_args --;
6230 /* Evaluate each argument. */
6231 n_args = call_expr_nargs (exp);
6233 if (n_args < builtin_n_args)
6235 error ("too few arguments to %qE", fnname);
6236 return NULL_RTX;
6238 if (n_args > builtin_n_args)
6240 error ("too many arguments to %qE", fnname);
6241 return NULL_RTX;
6244 for (a = first_arg; a < builtin_n_args + first_arg; a++)
6246 tree value;
6248 args = CALL_EXPR_ARG (exp, a - first_arg);
6250 value = args;
6252 #if 0
6253 if (cgen_insn->regnums[a].reference_p)
6255 if (TREE_CODE (value) != ADDR_EXPR)
6257 debug_tree(value);
6258 error ("argument %d of %qE must be an address", a+1, fnname);
6259 return NULL_RTX;
6261 value = TREE_OPERAND (value, 0);
6263 #endif
6265 /* If the argument has been promoted to int, get the unpromoted
6266 value. This is necessary when sub-int memory values are bound
6267 to reference parameters. */
6268 if (TREE_CODE (value) == NOP_EXPR
6269 && TREE_TYPE (value) == integer_type_node
6270 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6271 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6272 < TYPE_PRECISION (TREE_TYPE (value))))
6273 value = TREE_OPERAND (value, 0);
6275 /* If the argument has been promoted to double, get the unpromoted
6276 SFmode value. This is necessary for FMAX support, for example. */
6277 if (TREE_CODE (value) == NOP_EXPR
6278 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6279 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6280 && TYPE_MODE (TREE_TYPE (value)) == DFmode
6281 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6282 value = TREE_OPERAND (value, 0);
6284 unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6285 arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
6286 arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6287 if (cgen_insn->regnums[a].reference_p)
6289 tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6290 enum machine_mode pointed_mode = TYPE_MODE (pointed_to);
6292 arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
6294 if (arg[a] == 0)
6296 error ("argument %d of %qE must be in the range %d...%d",
6297 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
6298 return NULL_RTX;
6302 for (a = 0; a < first_arg; a++)
6304 if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6305 arg[a] = target;
6306 else
6307 arg[a] = gen_reg_rtx (idata->operand[0].mode);
6310 /* Convert the arguments into a form suitable for the intrinsic.
6311 Report an error if this isn't possible. */
6312 for (opindex = 0; opindex < idata->n_operands; opindex++)
6314 a = cgen_insn->op_mapping[opindex];
6315 op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6316 arg[a], unsigned_p[a]);
6317 if (op[opindex] == 0)
6319 mep_incompatible_arg (&idata->operand[opindex],
6320 arg[a], a + 1 - first_arg, fnname);
6321 return NULL_RTX;
6325 /* Emit the instruction. */
6326 pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6327 op[5], op[6], op[7], op[8], op[9]);
6329 if (GET_CODE (pat) == SET
6330 && GET_CODE (SET_DEST (pat)) == PC
6331 && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6332 emit_jump_insn (pat);
6333 else
6334 emit_insn (pat);
6336 /* Copy lvalues back to their final locations. */
6337 for (opindex = 0; opindex < idata->n_operands; opindex++)
6338 if (idata->operand[opindex].constraint[0] == '=')
6340 a = cgen_insn->op_mapping[opindex];
6341 if (a >= first_arg)
6343 if (GET_MODE_CLASS (GET_MODE (arg[a]))
6344 != GET_MODE_CLASS (GET_MODE (op[opindex])))
6345 emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6346 op[opindex]));
6347 else
6349 /* First convert the operand to the right mode, then copy it
6350 into the destination. Doing the conversion as a separate
6351 step (rather than using convert_move) means that we can
6352 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6353 refer to the same register. */
6354 op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6355 op[opindex], unsigned_p[a]);
6356 if (!rtx_equal_p (arg[a], op[opindex]))
6357 emit_move_insn (arg[a], op[opindex]);
6362 if (first_arg > 0 && target && target != op[0])
6364 emit_move_insn (target, op[0]);
6367 return target;
6370 static bool
6371 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
6373 return false;
6376 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6377 a global register. */
6379 static int
6380 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
6382 int regno;
6383 rtx x = *loc;
6385 if (! x)
6386 return 0;
6388 switch (GET_CODE (x))
6390 case SUBREG:
6391 if (REG_P (SUBREG_REG (x)))
6393 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6394 && global_regs[subreg_regno (x)])
6395 return 1;
6396 return 0;
6398 break;
6400 case REG:
6401 regno = REGNO (x);
6402 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6403 return 1;
6404 return 0;
6406 case SCRATCH:
6407 case PC:
6408 case CC0:
6409 case CONST_INT:
6410 case CONST_DOUBLE:
6411 case CONST:
6412 case LABEL_REF:
6413 return 0;
6415 case CALL:
6416 /* A non-constant call might use a global register. */
6417 return 1;
6419 default:
6420 break;
6423 return 0;
6426 /* Returns nonzero if X mentions a global register. */
6428 static int
6429 global_reg_mentioned_p (rtx x)
6431 if (INSN_P (x))
6433 if (CALL_P (x))
6435 if (! RTL_CONST_OR_PURE_CALL_P (x))
6436 return 1;
6437 x = CALL_INSN_FUNCTION_USAGE (x);
6438 if (x == 0)
6439 return 0;
6441 else
6442 x = PATTERN (x);
6445 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6447 /* Scheduling hooks for VLIW mode.
6449 Conceptually this is very simple: we have a two-pack architecture
6450 that takes one core insn and one coprocessor insn to make up either
6451 a 32- or 64-bit instruction word (depending on the option bit set in
6452 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6453 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6454 and one 48-bit cop insn or two 32-bit core/cop insns.
6456 In practice, instruction selection will be a bear. Consider in
6457 VL64 mode the following insns
6459 add $1, 1
6460 cmov $cr0, $0
6462 these cannot pack, since the add is a 16-bit core insn and cmov
6463 is a 32-bit cop insn. However,
6465 add3 $1, $1, 1
6466 cmov $cr0, $0
6468 packs just fine. For good VLIW code generation in VL64 mode, we
6469 will have to have 32-bit alternatives for many of the common core
6470 insns. Not implemented. */
6472 static int
6473 mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
6475 int cost_specified;
6477 if (REG_NOTE_KIND (link) != 0)
6479 /* See whether INSN and DEP_INSN are intrinsics that set the same
6480 hard register. If so, it is more important to free up DEP_INSN
6481 than it is to free up INSN.
6483 Note that intrinsics like mep_mulr are handled differently from
6484 the equivalent mep.md patterns. In mep.md, if we don't care
6485 about the value of $lo and $hi, the pattern will just clobber
6486 the registers, not set them. Since clobbers don't count as
6487 output dependencies, it is often possible to reorder two mulrs,
6488 even after reload.
6490 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6491 so any pair of mep_mulr()s will be inter-dependent. We should
6492 therefore give the first mep_mulr() a higher priority. */
6493 if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
6494 && global_reg_mentioned_p (PATTERN (insn))
6495 && global_reg_mentioned_p (PATTERN (dep_insn)))
6496 return 1;
6498 /* If the dependence is an anti or output dependence, assume it
6499 has no cost. */
6500 return 0;
6503 /* If we can't recognize the insns, we can't really do anything. */
6504 if (recog_memoized (dep_insn) < 0)
6505 return cost;
6507 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6508 attribute instead. */
6509 if (!TARGET_H1)
6511 cost_specified = get_attr_latency (dep_insn);
6512 if (cost_specified != 0)
6513 return cost_specified;
6516 return cost;
6519 /* ??? We don't properly compute the length of a load/store insn,
6520 taking into account the addressing mode. */
6522 static int
6523 mep_issue_rate (void)
6525 return TARGET_IVC2 ? 3 : 2;
6528 /* Return true if function DECL was declared with the vliw attribute. */
6530 bool
6531 mep_vliw_function_p (tree decl)
6533 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6536 static rtx
6537 mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6539 int i;
6541 for (i = nready - 1; i >= 0; --i)
6543 rtx insn = ready[i];
6544 if (recog_memoized (insn) >= 0
6545 && get_attr_slot (insn) == slot
6546 && get_attr_length (insn) == length)
6547 return insn;
6550 return NULL_RTX;
6553 static void
6554 mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6556 int i;
6558 for (i = 0; i < nready; ++i)
6559 if (ready[i] == insn)
6561 for (; i < nready - 1; ++i)
6562 ready[i] = ready[i + 1];
6563 ready[i] = insn;
6564 return;
6567 gcc_unreachable ();
6570 static void
6571 mep_print_sched_insn (FILE *dump, rtx insn)
6573 const char *slots = "none";
6574 const char *name = NULL;
6575 int code;
6576 char buf[30];
6578 if (GET_CODE (PATTERN (insn)) == SET
6579 || GET_CODE (PATTERN (insn)) == PARALLEL)
6581 switch (get_attr_slots (insn))
6583 case SLOTS_CORE: slots = "core"; break;
6584 case SLOTS_C3: slots = "c3"; break;
6585 case SLOTS_P0: slots = "p0"; break;
6586 case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6587 case SLOTS_P0_P1: slots = "p0,p1"; break;
6588 case SLOTS_P0S: slots = "p0s"; break;
6589 case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6590 case SLOTS_P1: slots = "p1"; break;
6591 default:
6592 sprintf(buf, "%d", get_attr_slots (insn));
6593 slots = buf;
6594 break;
6597 if (GET_CODE (PATTERN (insn)) == USE)
6598 slots = "use";
6600 code = INSN_CODE (insn);
6601 if (code >= 0)
6602 name = get_insn_name (code);
6603 if (!name)
6604 name = "{unknown}";
6606 fprintf (dump,
6607 "insn %4d %4d %8s %s\n",
6608 code,
6609 INSN_UID (insn),
6610 name,
6611 slots);
6614 static int
6615 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6616 int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
6617 int *pnready, int clock ATTRIBUTE_UNUSED)
6619 int nready = *pnready;
6620 rtx core_insn, cop_insn;
6621 int i;
6623 if (dump && sched_verbose > 1)
6625 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6626 for (i=0; i<nready; i++)
6627 mep_print_sched_insn (dump, ready[i]);
6628 fprintf (dump, "\n");
6631 if (!mep_vliw_function_p (cfun->decl))
6632 return 1;
6633 if (nready < 2)
6634 return 1;
6636 /* IVC2 uses a DFA to determine what's ready and what's not. */
6637 if (TARGET_IVC2)
6638 return nready;
6640 /* We can issue either a core or coprocessor instruction.
6641 Look for a matched pair of insns to reorder. If we don't
6642 find any, don't second-guess the scheduler's priorities. */
6644 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6645 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6646 TARGET_OPT_VL64 ? 6 : 2)))
6648 else if (TARGET_OPT_VL64
6649 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6650 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6652 else
6653 /* We didn't find a pair. Issue the single insn at the head
6654 of the ready list. */
6655 return 1;
6657 /* Reorder the two insns first. */
6658 mep_move_ready_insn (ready, nready, core_insn);
6659 mep_move_ready_insn (ready, nready - 1, cop_insn);
6660 return 2;
6663 /* A for_each_rtx callback. Return true if *X is a register that is
6664 set by insn PREV. */
6666 static int
6667 mep_store_find_set (rtx *x, void *prev)
6669 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6672 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6673 not the containing insn. */
6675 static bool
6676 mep_store_data_bypass_1 (rtx prev, rtx pat)
6678 /* Cope with intrinsics like swcpa. */
6679 if (GET_CODE (pat) == PARALLEL)
6681 int i;
6683 for (i = 0; i < XVECLEN (pat, 0); i++)
6684 if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6685 return true;
6687 return false;
6690 /* Check for some sort of store. */
6691 if (GET_CODE (pat) != SET
6692 || GET_CODE (SET_DEST (pat)) != MEM)
6693 return false;
6695 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6696 The first operand to the unspec is the store data and the other operands
6697 are used to calculate the address. */
6698 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6700 rtx src;
6701 int i;
6703 src = SET_SRC (pat);
6704 for (i = 1; i < XVECLEN (src, 0); i++)
6705 if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6706 return false;
6708 return true;
6711 /* Otherwise just check that PREV doesn't modify any register mentioned
6712 in the memory destination. */
6713 return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6716 /* Return true if INSN is a store instruction and if the store address
6717 has no true dependence on PREV. */
6719 bool
6720 mep_store_data_bypass_p (rtx prev, rtx insn)
6722 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6725 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6726 is a register other than LO or HI and if PREV sets *X. */
6728 static int
6729 mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6731 return (REG_P (*x)
6732 && REGNO (*x) != LO_REGNO
6733 && REGNO (*x) != HI_REGNO
6734 && reg_set_p (*x, (const_rtx) prev));
6737 /* Return true if, apart from HI/LO, there are no true dependencies
6738 between multiplication instructions PREV and INSN. */
6740 bool
6741 mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6743 rtx pat;
6745 pat = PATTERN (insn);
6746 if (GET_CODE (pat) == PARALLEL)
6747 pat = XVECEXP (pat, 0, 0);
6748 return (GET_CODE (pat) == SET
6749 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6752 /* Return true if INSN is an ldc instruction that issues to the
6753 MeP-h1 integer pipeline. This is true for instructions that
6754 read from PSW, LP, SAR, HI and LO. */
6756 bool
6757 mep_ipipe_ldc_p (rtx insn)
6759 rtx pat, src;
6761 pat = PATTERN (insn);
6763 /* Cope with instrinsics that set both a hard register and its shadow.
6764 The set of the hard register comes first. */
6765 if (GET_CODE (pat) == PARALLEL)
6766 pat = XVECEXP (pat, 0, 0);
6768 if (GET_CODE (pat) == SET)
6770 src = SET_SRC (pat);
6772 /* Cope with intrinsics. The first operand to the unspec is
6773 the source register. */
6774 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6775 src = XVECEXP (src, 0, 0);
6777 if (REG_P (src))
6778 switch (REGNO (src))
6780 case PSW_REGNO:
6781 case LP_REGNO:
6782 case SAR_REGNO:
6783 case HI_REGNO:
6784 case LO_REGNO:
6785 return true;
6788 return false;
6791 /* Create a VLIW bundle from core instruction CORE and coprocessor
6792 instruction COP. COP always satisfies INSN_P, but CORE can be
6793 either a new pattern or an existing instruction.
6795 Emit the bundle in place of COP and return it. */
6797 static rtx
6798 mep_make_bundle (rtx core, rtx cop)
6800 rtx insn;
6802 /* If CORE is an existing instruction, remove it, otherwise put
6803 the new pattern in an INSN harness. */
6804 if (INSN_P (core))
6805 remove_insn (core);
6806 else
6807 core = make_insn_raw (core);
6809 /* Generate the bundle sequence and replace COP with it. */
6810 insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
6811 insn = emit_insn_after (insn, cop);
6812 remove_insn (cop);
6814 /* Set up the links of the insns inside the SEQUENCE. */
6815 PREV_INSN (core) = PREV_INSN (insn);
6816 NEXT_INSN (core) = cop;
6817 PREV_INSN (cop) = core;
6818 NEXT_INSN (cop) = NEXT_INSN (insn);
6820 /* Set the VLIW flag for the coprocessor instruction. */
6821 PUT_MODE (core, VOIDmode);
6822 PUT_MODE (cop, BImode);
6824 /* Derive a location for the bundle. Individual instructions cannot
6825 have their own location because there can be no assembler labels
6826 between CORE and COP. */
6827 INSN_LOCATION (insn) = INSN_LOCATION (INSN_LOCATION (core) ? core : cop);
6828 INSN_LOCATION (core) = 0;
6829 INSN_LOCATION (cop) = 0;
6831 return insn;
6834 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6836 static void
6837 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6839 rtx * pinsn = (rtx *) data;
6841 if (*pinsn && reg_mentioned_p (x, *pinsn))
6842 *pinsn = NULL_RTX;
6845 /* Return true if anything in insn X is (anti,output,true) dependent on
6846 anything in insn Y. */
6848 static int
6849 mep_insn_dependent_p (rtx x, rtx y)
6851 rtx tmp;
6853 gcc_assert (INSN_P (x));
6854 gcc_assert (INSN_P (y));
6856 tmp = PATTERN (y);
6857 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6858 if (tmp == NULL_RTX)
6859 return 1;
6861 tmp = PATTERN (x);
6862 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6863 if (tmp == NULL_RTX)
6864 return 1;
6866 return 0;
6869 static int
6870 core_insn_p (rtx insn)
6872 if (GET_CODE (PATTERN (insn)) == USE)
6873 return 0;
6874 if (get_attr_slot (insn) == SLOT_CORE)
6875 return 1;
6876 return 0;
6879 /* Mark coprocessor instructions that can be bundled together with
6880 the immediately preceding core instruction. This is later used
6881 to emit the "+" that tells the assembler to create a VLIW insn.
6883 For unbundled insns, the assembler will automatically add coprocessor
6884 nops, and 16-bit core nops. Due to an apparent oversight in the
6885 spec, the assembler will _not_ automatically add 32-bit core nops,
6886 so we have to emit those here.
6888 Called from mep_insn_reorg. */
6890 static void
6891 mep_bundle_insns (rtx insns)
6893 rtx insn, last = NULL_RTX, first = NULL_RTX;
6894 int saw_scheduling = 0;
6896 /* Only do bundling if we're in vliw mode. */
6897 if (!mep_vliw_function_p (cfun->decl))
6898 return;
6900 /* The first insn in a bundle are TImode, the remainder are
6901 VOIDmode. After this function, the first has VOIDmode and the
6902 rest have BImode. */
6904 /* Note: this doesn't appear to be true for JUMP_INSNs. */
6906 /* First, move any NOTEs that are within a bundle, to the beginning
6907 of the bundle. */
6908 for (insn = insns; insn ; insn = NEXT_INSN (insn))
6910 if (NOTE_P (insn) && first)
6911 /* Don't clear FIRST. */;
6913 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
6914 first = insn;
6916 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
6918 rtx note, prev;
6920 /* INSN is part of a bundle; FIRST is the first insn in that
6921 bundle. Move all intervening notes out of the bundle.
6922 In addition, since the debug pass may insert a label
6923 whenever the current line changes, set the location info
6924 for INSN to match FIRST. */
6926 INSN_LOCATION (insn) = INSN_LOCATION (first);
6928 note = PREV_INSN (insn);
6929 while (note && note != first)
6931 prev = PREV_INSN (note);
6933 if (NOTE_P (note))
6935 /* Remove NOTE from here... */
6936 PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
6937 NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
6938 /* ...and put it in here. */
6939 NEXT_INSN (note) = first;
6940 PREV_INSN (note) = PREV_INSN (first);
6941 NEXT_INSN (PREV_INSN (note)) = note;
6942 PREV_INSN (NEXT_INSN (note)) = note;
6945 note = prev;
6949 else if (!NONJUMP_INSN_P (insn))
6950 first = 0;
6953 /* Now fix up the bundles. */
6954 for (insn = insns; insn ; insn = NEXT_INSN (insn))
6956 if (NOTE_P (insn))
6957 continue;
6959 if (!NONJUMP_INSN_P (insn))
6961 last = 0;
6962 continue;
6965 /* If we're not optimizing enough, there won't be scheduling
6966 info. We detect that here. */
6967 if (GET_MODE (insn) == TImode)
6968 saw_scheduling = 1;
6969 if (!saw_scheduling)
6970 continue;
6972 if (TARGET_IVC2)
6974 rtx core_insn = NULL_RTX;
6976 /* IVC2 slots are scheduled by DFA, so we just accept
6977 whatever the scheduler gives us. However, we must make
6978 sure the core insn (if any) is the first in the bundle.
6979 The IVC2 assembler can insert whatever NOPs are needed,
6980 and allows a COP insn to be first. */
6982 if (NONJUMP_INSN_P (insn)
6983 && GET_CODE (PATTERN (insn)) != USE
6984 && GET_MODE (insn) == TImode)
6986 for (last = insn;
6987 NEXT_INSN (last)
6988 && GET_MODE (NEXT_INSN (last)) == VOIDmode
6989 && NONJUMP_INSN_P (NEXT_INSN (last));
6990 last = NEXT_INSN (last))
6992 if (core_insn_p (last))
6993 core_insn = last;
6995 if (core_insn_p (last))
6996 core_insn = last;
6998 if (core_insn && core_insn != insn)
7000 /* Swap core insn to first in the bundle. */
7002 /* Remove core insn. */
7003 if (PREV_INSN (core_insn))
7004 NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
7005 if (NEXT_INSN (core_insn))
7006 PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);
7008 /* Re-insert core insn. */
7009 PREV_INSN (core_insn) = PREV_INSN (insn);
7010 NEXT_INSN (core_insn) = insn;
7012 if (PREV_INSN (core_insn))
7013 NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
7014 PREV_INSN (insn) = core_insn;
7016 PUT_MODE (core_insn, TImode);
7017 PUT_MODE (insn, VOIDmode);
7021 /* The first insn has TImode, the rest have VOIDmode */
7022 if (GET_MODE (insn) == TImode)
7023 PUT_MODE (insn, VOIDmode);
7024 else
7025 PUT_MODE (insn, BImode);
7026 continue;
7029 PUT_MODE (insn, VOIDmode);
7030 if (recog_memoized (insn) >= 0
7031 && get_attr_slot (insn) == SLOT_COP)
7033 if (JUMP_P (insn)
7034 || ! last
7035 || recog_memoized (last) < 0
7036 || get_attr_slot (last) != SLOT_CORE
7037 || (get_attr_length (insn)
7038 != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
7039 || mep_insn_dependent_p (insn, last))
7041 switch (get_attr_length (insn))
7043 case 8:
7044 break;
7045 case 6:
7046 insn = mep_make_bundle (gen_nop (), insn);
7047 break;
7048 case 4:
7049 if (TARGET_OPT_VL64)
7050 insn = mep_make_bundle (gen_nop32 (), insn);
7051 break;
7052 case 2:
7053 if (TARGET_OPT_VL64)
7054 error ("2 byte cop instructions are"
7055 " not allowed in 64-bit VLIW mode");
7056 else
7057 insn = mep_make_bundle (gen_nop (), insn);
7058 break;
7059 default:
7060 error ("unexpected %d byte cop instruction",
7061 get_attr_length (insn));
7062 break;
7065 else
7066 insn = mep_make_bundle (last, insn);
7069 last = insn;
7074 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7075 Return true on success. This function can fail if the intrinsic
7076 is unavailable or if the operands don't satisfy their predicates. */
7078 bool
7079 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7081 const struct cgen_insn *cgen_insn;
7082 const struct insn_data_d *idata;
7083 rtx newop[10];
7084 int i;
7086 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7087 return false;
7089 idata = &insn_data[cgen_insn->icode];
7090 for (i = 0; i < idata->n_operands; i++)
7092 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7093 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7094 return false;
7097 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7098 newop[3], newop[4], newop[5],
7099 newop[6], newop[7], newop[8]));
7101 return true;
7105 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7106 OPERANDS[0]. Report an error if the instruction could not
7107 be synthesized. OPERANDS[1] is a register_operand. For sign
7108 and zero extensions, it may be smaller than SImode. */
7110 bool
7111 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
7112 rtx * operands ATTRIBUTE_UNUSED)
7114 return false;
7118 /* Likewise, but apply a binary operation to OPERANDS[1] and
7119 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7120 can be a general_operand.
7122 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7123 third operand. REG and REG3 take register operands only. */
7125 bool
7126 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
7127 int ATTRIBUTE_UNUSED immediate3,
7128 int ATTRIBUTE_UNUSED reg,
7129 int ATTRIBUTE_UNUSED reg3,
7130 rtx * operands ATTRIBUTE_UNUSED)
7132 return false;
7135 static bool
7136 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
7137 int opno ATTRIBUTE_UNUSED, int *total,
7138 bool ATTRIBUTE_UNUSED speed_t)
7140 switch (code)
7142 case CONST_INT:
7143 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7144 *total = 0;
7145 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7146 *total = 1;
7147 else
7148 *total = 3;
7149 return true;
7151 case SYMBOL_REF:
7152 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7153 return true;
7155 case MULT:
7156 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7157 ? COSTS_N_INSNS (3)
7158 : COSTS_N_INSNS (2));
7159 return true;
7161 return false;
7164 static int
7165 mep_address_cost (rtx addr ATTRIBUTE_UNUSED,
7166 enum machine_mode mode ATTRIBUTE_UNUSED,
7167 addr_space_t as ATTRIBUTE_UNUSED,
7168 bool ATTRIBUTE_UNUSED speed_p)
7170 return 1;
/* Implement TARGET_ASM_INIT_SECTIONS: create the MeP-specific output
   sections referenced elsewhere in this file (e.g. by the section
   selection hooks).  Each one is an unnamed section whose switch-to
   text is the literal assembler directive given here.  */

static void
mep_asm_init_sections (void)
{
  /* Writable "based" data.  */
  based_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .based,\"aw\"");

  /* Small zero-initialized data (BSS).  */
  tinybss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .sbss,\"aw\"");

  /* Small writable initialized data.  */
  sdata_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .sdata,\"aw\",@progbits");

  /* Writable "far" data, and its zero-initialized counterpart.  */
  far_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .far,\"aw\"");

  farbss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .farbss,\"aw\"");

  /* Read-only data variants (no SECTION_WRITE flag).  */
  frodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .frodata,\"a\"");

  srodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .srodata,\"a\"");

  /* VLIW-mode code sections: the switch-to text also emits ".vliw"
     so the assembler enters VLIW mode.  */
  vtext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vtext,\"axv\"\n\t.vliw");

  vftext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vftext,\"axv\"\n\t.vliw");

  /* Core-mode (non-VLIW) far text; ".core" switches the assembler
     back to core mode.  */
  ftext_section
    = get_unnamed_section (SECTION_CODE, output_section_asm_op,
			   "\t.section .ftext,\"ax\"\n\t.core");
}
/* Initialize the GCC target structure.  Each pair below undefines the
   default hook macro and points it at the MeP implementation defined
   earlier in this file.  */

/* Function output and attribute handling.  */
#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE mep_attribute_table
#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
#undef TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P mep_can_inline_p

/* Sections and symbol encoding.  */
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION mep_asm_named_section

/* Builtins.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS mep_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN mep_expand_builtin

/* Instruction scheduling.  */
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST mep_adjust_cost
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE mep_issue_rate
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER mep_sched_reorder

/* Section selection / placement.  */
#undef TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION mep_select_section
#undef TARGET_ASM_UNIQUE_SECTION
#define TARGET_ASM_UNIQUE_SECTION mep_unique_section
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO mep_encode_section_info

/* Calls, costs, and machine-dependent reorg.  */
#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS mep_rtx_cost
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST mep_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG mep_reorg

/* Argument passing.  */
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG mep_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE mep_function_arg_advance
#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE mep_option_override
#undef TARGET_ALLOCATE_INITIAL_VALUE
#define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY mep_return_in_memory
#undef TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield

/* Varargs support.  */
#undef TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr

/* Miscellaneous.  */
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE mep_can_eliminate
#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE mep_conditional_register_usage
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT mep_trampoline_init
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P mep_legitimate_constant_p
#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost

struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collector roots generated by gengtype for this file.  */
#include "gt-mep.h"