/* gcc/config/mep/mep.c — from official-gcc.git,
   blob dd92dadf0d6afe56ba268d0a8bd0c4e7f2086961.  */
1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "alias.h"
27 #include "symtab.h"
28 #include "tree.h"
29 #include "fold-const.h"
30 #include "varasm.h"
31 #include "calls.h"
32 #include "stringpool.h"
33 #include "stor-layout.h"
34 #include "regs.h"
35 #include "hard-reg-set.h"
36 #include "insn-config.h"
37 #include "conditions.h"
38 #include "insn-flags.h"
39 #include "output.h"
40 #include "insn-attr.h"
41 #include "flags.h"
42 #include "recog.h"
43 #include "obstack.h"
44 #include "function.h"
45 #include "expmed.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "emit-rtl.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "except.h"
52 #include "insn-codes.h"
53 #include "optabs.h"
54 #include "reload.h"
55 #include "tm_p.h"
56 #include "diagnostic-core.h"
57 #include "target.h"
58 #include "langhooks.h"
59 #include "dominance.h"
60 #include "cfg.h"
61 #include "cfgrtl.h"
62 #include "cfganal.h"
63 #include "lcm.h"
64 #include "cfgbuild.h"
65 #include "cfgcleanup.h"
66 #include "predict.h"
67 #include "basic-block.h"
68 #include "df.h"
69 #include "tree-ssa-alias.h"
70 #include "internal-fn.h"
71 #include "gimple-fold.h"
72 #include "tree-eh.h"
73 #include "gimple-expr.h"
74 #include "gimple.h"
75 #include "gimplify.h"
76 #include "opts.h"
77 #include "dumpfile.h"
78 #include "builtins.h"
79 #include "rtl-iter.h"
81 #include "target-def.h"
83 /* Structure of this file:
85 + Command Line Option Support
86 + Pattern support - constraints, predicates, expanders
87 + Reload Support
88 + Costs
89 + Functions to save and restore machine-specific function data.
90 + Frame/Epilog/Prolog Related
91 + Operand Printing
92 + Function args in registers
93 + Handle pipeline hazards
94 + Handle attributes
95 + Trampolines
96 + Machine-dependent Reorg
97 + Builtins. */
99 /* Symbol encodings:
101 Symbols are encoded as @ <char> . <name> where <char> is one of these:
103 b - based
104 t - tiny
105 n - near
106 f - far
107 i - io, near
108 I - io, far
109 c - cb (control bus) */
111 struct GTY(()) machine_function
113 int mep_frame_pointer_needed;
115 /* For varargs. */
116 int arg_regs_to_save;
117 int regsave_filler;
118 int frame_filler;
119 int frame_locked;
121 /* Records __builtin_return address. */
122 rtx eh_stack_adjust;
124 int reg_save_size;
125 int reg_save_slot[FIRST_PSEUDO_REGISTER];
126 unsigned char reg_saved[FIRST_PSEUDO_REGISTER];
128 /* 2 if the current function has an interrupt attribute, 1 if not, 0
129 if unknown. This is here because resource.c uses EPILOGUE_USES
130 which needs it. */
131 int interrupt_handler;
133 /* Likewise, for disinterrupt attribute. */
134 int disable_interrupts;
136 /* Number of doloop tags used so far. */
137 int doloop_tags;
139 /* True if the last tag was allocated to a doloop_end. */
140 bool doloop_tag_from_end;
142 /* True if reload changes $TP. */
143 bool reload_changes_tp;
145 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
146 We only set this if the function is an interrupt handler. */
147 int asms_without_operands;
150 #define MEP_CONTROL_REG(x) \
151 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
153 static GTY(()) section * based_section;
154 static GTY(()) section * tinybss_section;
155 static GTY(()) section * far_section;
156 static GTY(()) section * farbss_section;
157 static GTY(()) section * frodata_section;
158 static GTY(()) section * srodata_section;
160 static GTY(()) section * vtext_section;
161 static GTY(()) section * vftext_section;
162 static GTY(()) section * ftext_section;
164 static void mep_set_leaf_registers (int);
165 static bool symbol_p (rtx);
166 static bool symbolref_p (rtx);
167 static void encode_pattern_1 (rtx);
168 static void encode_pattern (rtx);
169 static bool const_in_range (rtx, int, int);
170 static void mep_rewrite_mult (rtx_insn *, rtx);
171 static void mep_rewrite_mulsi3 (rtx_insn *, rtx, rtx, rtx);
172 static void mep_rewrite_maddsi3 (rtx_insn *, rtx, rtx, rtx, rtx);
173 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx_insn *, bool);
174 static bool move_needs_splitting (rtx, rtx, machine_mode);
175 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
176 static bool mep_nongeneral_reg (rtx);
177 static bool mep_general_copro_reg (rtx);
178 static bool mep_nonregister (rtx);
179 static struct machine_function* mep_init_machine_status (void);
180 static rtx mep_tp_rtx (void);
181 static rtx mep_gp_rtx (void);
182 static bool mep_interrupt_p (void);
183 static bool mep_disinterrupt_p (void);
184 static bool mep_reg_set_p (rtx, rtx);
185 static bool mep_reg_set_in_function (int);
186 static bool mep_interrupt_saved_reg (int);
187 static bool mep_call_saves_register (int);
188 static rtx_insn *F (rtx_insn *);
189 static void add_constant (int, int, int, int);
190 static rtx_insn *maybe_dead_move (rtx, rtx, bool);
191 static void mep_reload_pointer (int, const char *);
192 static void mep_start_function (FILE *, HOST_WIDE_INT);
193 static bool mep_function_ok_for_sibcall (tree, tree);
194 static int unique_bit_in (HOST_WIDE_INT);
195 static int bit_size_for_clip (HOST_WIDE_INT);
196 static int bytesize (const_tree, machine_mode);
197 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
198 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
199 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
200 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
201 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
202 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
203 static bool mep_function_attribute_inlinable_p (const_tree);
204 static bool mep_can_inline_p (tree, tree);
205 static bool mep_lookup_pragma_disinterrupt (const char *);
206 static int mep_multiple_address_regions (tree, bool);
207 static int mep_attrlist_to_encoding (tree, tree);
208 static void mep_insert_attributes (tree, tree *);
209 static void mep_encode_section_info (tree, rtx, int);
210 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
211 static void mep_unique_section (tree, int);
212 static unsigned int mep_section_type_flags (tree, const char *, int);
213 static void mep_asm_named_section (const char *, unsigned int, tree);
214 static bool mep_mentioned_p (rtx, rtx, int);
215 static void mep_reorg_regmove (rtx_insn *);
216 static rtx_insn *mep_insert_repeat_label_last (rtx_insn *, rtx_code_label *,
217 bool, bool);
218 static void mep_reorg_repeat (rtx_insn *);
219 static bool mep_invertable_branch_p (rtx_insn *);
220 static void mep_invert_branch (rtx_insn *, rtx_insn *);
221 static void mep_reorg_erepeat (rtx_insn *);
222 static void mep_jmp_return_reorg (rtx_insn *);
223 static void mep_reorg_addcombine (rtx_insn *);
224 static void mep_reorg (void);
225 static void mep_init_intrinsics (void);
226 static void mep_init_builtins (void);
227 static void mep_intrinsic_unavailable (int);
228 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
229 static bool mep_get_move_insn (int, const struct cgen_insn **);
230 static rtx mep_convert_arg (machine_mode, rtx);
231 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
232 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
233 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
234 static rtx mep_expand_builtin (tree, rtx, rtx, machine_mode, int);
235 static int mep_adjust_cost (rtx_insn *, rtx, rtx_insn *, int);
236 static int mep_issue_rate (void);
237 static rtx_insn *mep_find_ready_insn (rtx_insn **, int, enum attr_slot, int);
238 static void mep_move_ready_insn (rtx_insn **, int, rtx_insn *);
239 static int mep_sched_reorder (FILE *, int, rtx_insn **, int *, int);
240 static rtx_insn *mep_make_bundle (rtx, rtx_insn *);
241 static void mep_bundle_insns (rtx_insn *);
242 static bool mep_rtx_cost (rtx, int, int, int, int *, bool);
243 static int mep_address_cost (rtx, machine_mode, addr_space_t, bool);
244 static void mep_setup_incoming_varargs (cumulative_args_t, machine_mode,
245 tree, int *, int);
246 static bool mep_pass_by_reference (cumulative_args_t cum, machine_mode,
247 const_tree, bool);
248 static rtx mep_function_arg (cumulative_args_t, machine_mode,
249 const_tree, bool);
250 static void mep_function_arg_advance (cumulative_args_t, machine_mode,
251 const_tree, bool);
252 static bool mep_vector_mode_supported_p (machine_mode);
253 static rtx mep_allocate_initial_value (rtx);
254 static void mep_asm_init_sections (void);
255 static int mep_comp_type_attributes (const_tree, const_tree);
256 static bool mep_narrow_volatile_bitfield (void);
257 static rtx mep_expand_builtin_saveregs (void);
258 static tree mep_build_builtin_va_list (void);
259 static void mep_expand_va_start (tree, rtx);
260 static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
261 static bool mep_can_eliminate (const int, const int);
262 static void mep_conditional_register_usage (void);
263 static void mep_trampoline_init (rtx, tree, rtx);
265 #define WANT_GCC_DEFINITIONS
266 #include "mep-intrin.h"
267 #undef WANT_GCC_DEFINITIONS
270 /* Command Line Option Support. */
272 char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
274 /* True if we can use cmov instructions to move values back and forth
275 between core and coprocessor registers. */
276 bool mep_have_core_copro_moves_p;
278 /* True if we can use cmov instructions (or a work-alike) to move
279 values between coprocessor registers. */
280 bool mep_have_copro_copro_moves_p;
282 /* A table of all coprocessor instructions that can act like
283 a coprocessor-to-coprocessor cmov. */
284 static const int mep_cmov_insns[] = {
285 mep_cmov,
286 mep_cpmov,
287 mep_fmovs,
288 mep_caddi3,
289 mep_csubi3,
290 mep_candi3,
291 mep_cori3,
292 mep_cxori3,
293 mep_cand3,
294 mep_cor3
298 static void
299 mep_set_leaf_registers (int enable)
301 int i;
303 if (mep_leaf_registers[0] != enable)
304 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
305 mep_leaf_registers[i] = enable;
308 static void
309 mep_conditional_register_usage (void)
311 int i;
313 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
315 fixed_regs[HI_REGNO] = 1;
316 fixed_regs[LO_REGNO] = 1;
317 call_used_regs[HI_REGNO] = 1;
318 call_used_regs[LO_REGNO] = 1;
321 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
322 global_regs[i] = 1;
325 static void
326 mep_option_override (void)
328 unsigned int i;
329 int j;
330 cl_deferred_option *opt;
331 vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) mep_deferred_options;
333 if (v)
334 FOR_EACH_VEC_ELT (*v, i, opt)
336 switch (opt->opt_index)
338 case OPT_mivc2:
339 for (j = 0; j < 32; j++)
340 fixed_regs[j + 48] = 0;
341 for (j = 0; j < 32; j++)
342 call_used_regs[j + 48] = 1;
343 for (j = 6; j < 8; j++)
344 call_used_regs[j + 48] = 0;
346 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
347 RN (0, "$csar0");
348 RN (1, "$cc");
349 RN (4, "$cofr0");
350 RN (5, "$cofr1");
351 RN (6, "$cofa0");
352 RN (7, "$cofa1");
353 RN (15, "$csar1");
355 RN (16, "$acc0_0");
356 RN (17, "$acc0_1");
357 RN (18, "$acc0_2");
358 RN (19, "$acc0_3");
359 RN (20, "$acc0_4");
360 RN (21, "$acc0_5");
361 RN (22, "$acc0_6");
362 RN (23, "$acc0_7");
364 RN (24, "$acc1_0");
365 RN (25, "$acc1_1");
366 RN (26, "$acc1_2");
367 RN (27, "$acc1_3");
368 RN (28, "$acc1_4");
369 RN (29, "$acc1_5");
370 RN (30, "$acc1_6");
371 RN (31, "$acc1_7");
372 #undef RN
373 break;
375 default:
376 gcc_unreachable ();
380 if (flag_pic == 1)
381 warning (OPT_fpic, "-fpic is not supported");
382 if (flag_pic == 2)
383 warning (OPT_fPIC, "-fPIC is not supported");
384 if (TARGET_S && TARGET_M)
385 error ("only one of -ms and -mm may be given");
386 if (TARGET_S && TARGET_L)
387 error ("only one of -ms and -ml may be given");
388 if (TARGET_M && TARGET_L)
389 error ("only one of -mm and -ml may be given");
390 if (TARGET_S && global_options_set.x_mep_tiny_cutoff)
391 error ("only one of -ms and -mtiny= may be given");
392 if (TARGET_M && global_options_set.x_mep_tiny_cutoff)
393 error ("only one of -mm and -mtiny= may be given");
394 if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
395 warning (0, "-mclip currently has no effect without -mminmax");
397 if (mep_const_section)
399 if (strcmp (mep_const_section, "tiny") != 0
400 && strcmp (mep_const_section, "near") != 0
401 && strcmp (mep_const_section, "far") != 0)
402 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
405 if (TARGET_S)
406 mep_tiny_cutoff = 65536;
407 if (TARGET_M)
408 mep_tiny_cutoff = 0;
409 if (TARGET_L && ! global_options_set.x_mep_tiny_cutoff)
410 mep_tiny_cutoff = 0;
412 if (TARGET_64BIT_CR_REGS)
413 flag_split_wide_types = 0;
415 init_machine_status = mep_init_machine_status;
416 mep_init_intrinsics ();
419 /* Pattern Support - constraints, predicates, expanders. */
421 /* MEP has very few instructions that can refer to the span of
422 addresses used by symbols, so it's common to check for them. */
424 static bool
425 symbol_p (rtx x)
427 int c = GET_CODE (x);
429 return (c == CONST_INT
430 || c == CONST
431 || c == SYMBOL_REF);
434 static bool
435 symbolref_p (rtx x)
437 int c;
439 if (GET_CODE (x) != MEM)
440 return false;
442 c = GET_CODE (XEXP (x, 0));
443 return (c == CONST_INT
444 || c == CONST
445 || c == SYMBOL_REF);
448 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
450 #define GEN_REG(R, STRICT) \
451 (GR_REGNO_P (R) \
452 || (!STRICT \
453 && ((R) == ARG_POINTER_REGNUM \
454 || (R) >= FIRST_PSEUDO_REGISTER)))
456 static char pattern[12], *patternp;
457 static GTY(()) rtx patternr[12];
458 #define RTX_IS(x) (strcmp (pattern, x) == 0)
460 static void
461 encode_pattern_1 (rtx x)
463 int i;
465 if (patternp == pattern + sizeof (pattern) - 2)
467 patternp[-1] = '?';
468 return;
471 patternr[patternp-pattern] = x;
473 switch (GET_CODE (x))
475 case REG:
476 *patternp++ = 'r';
477 break;
478 case MEM:
479 *patternp++ = 'm';
480 case CONST:
481 encode_pattern_1 (XEXP(x, 0));
482 break;
483 case PLUS:
484 *patternp++ = '+';
485 encode_pattern_1 (XEXP(x, 0));
486 encode_pattern_1 (XEXP(x, 1));
487 break;
488 case LO_SUM:
489 *patternp++ = 'L';
490 encode_pattern_1 (XEXP(x, 0));
491 encode_pattern_1 (XEXP(x, 1));
492 break;
493 case HIGH:
494 *patternp++ = 'H';
495 encode_pattern_1 (XEXP(x, 0));
496 break;
497 case SYMBOL_REF:
498 *patternp++ = 's';
499 break;
500 case LABEL_REF:
501 *patternp++ = 'l';
502 break;
503 case CONST_INT:
504 case CONST_DOUBLE:
505 *patternp++ = 'i';
506 break;
507 case UNSPEC:
508 *patternp++ = 'u';
509 *patternp++ = '0' + XCINT(x, 1, UNSPEC);
510 for (i=0; i<XVECLEN (x, 0); i++)
511 encode_pattern_1 (XVECEXP (x, 0, i));
512 break;
513 case USE:
514 *patternp++ = 'U';
515 break;
516 default:
517 *patternp++ = '?';
518 #if 0
519 fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
520 debug_rtx (x);
521 gcc_unreachable ();
522 #endif
523 break;
527 static void
528 encode_pattern (rtx x)
530 patternp = pattern;
531 encode_pattern_1 (x);
532 *patternp = 0;
536 mep_section_tag (rtx x)
538 const char *name;
540 while (1)
542 switch (GET_CODE (x))
544 case MEM:
545 case CONST:
546 x = XEXP (x, 0);
547 break;
548 case UNSPEC:
549 x = XVECEXP (x, 0, 0);
550 break;
551 case PLUS:
552 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
553 return 0;
554 x = XEXP (x, 0);
555 break;
556 default:
557 goto done;
560 done:
561 if (GET_CODE (x) != SYMBOL_REF)
562 return 0;
563 name = XSTR (x, 0);
564 if (name[0] == '@' && name[2] == '.')
566 if (name[1] == 'i' || name[1] == 'I')
568 if (name[1] == 'I')
569 return 'f'; /* near */
570 return 'n'; /* far */
572 return name[1];
574 return 0;
578 mep_regno_reg_class (int regno)
580 switch (regno)
582 case SP_REGNO: return SP_REGS;
583 case TP_REGNO: return TP_REGS;
584 case GP_REGNO: return GP_REGS;
585 case 0: return R0_REGS;
586 case HI_REGNO: return HI_REGS;
587 case LO_REGNO: return LO_REGS;
588 case ARG_POINTER_REGNUM: return GENERAL_REGS;
591 if (GR_REGNO_P (regno))
592 return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
593 if (CONTROL_REGNO_P (regno))
594 return CONTROL_REGS;
596 if (CR_REGNO_P (regno))
598 int i, j;
600 /* Search for the register amongst user-defined subclasses of
601 the coprocessor registers. */
602 for (i = USER0_REGS; i <= USER3_REGS; ++i)
604 if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
605 continue;
606 for (j = 0; j < N_REG_CLASSES; ++j)
608 enum reg_class sub = reg_class_subclasses[i][j];
610 if (sub == LIM_REG_CLASSES)
611 return i;
612 if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
613 break;
617 return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
620 if (CCR_REGNO_P (regno))
621 return CCR_REGS;
623 gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
624 return NO_REGS;
627 static bool
628 const_in_range (rtx x, int minv, int maxv)
630 return (GET_CODE (x) == CONST_INT
631 && INTVAL (x) >= minv
632 && INTVAL (x) <= maxv);
635 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
636 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
637 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
638 at the end of the insn stream. */
641 mep_mulr_source (rtx_insn *insn, rtx dest, rtx src1, rtx src2)
643 if (rtx_equal_p (dest, src1))
644 return src2;
645 else if (rtx_equal_p (dest, src2))
646 return src1;
647 else
649 if (insn == 0)
650 emit_insn (gen_movsi (copy_rtx (dest), src1));
651 else
652 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
653 return src2;
657 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
658 Change the last element of PATTERN from (clobber (scratch:SI))
659 to (clobber (reg:SI HI_REGNO)). */
661 static void
662 mep_rewrite_mult (rtx_insn *insn, rtx pattern)
664 rtx hi_clobber;
666 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
667 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
668 PATTERN (insn) = pattern;
669 INSN_CODE (insn) = -1;
672 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
673 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
674 store the result in DEST if nonnull. */
676 static void
677 mep_rewrite_mulsi3 (rtx_insn *insn, rtx dest, rtx src1, rtx src2)
679 rtx lo, pattern;
681 lo = gen_rtx_REG (SImode, LO_REGNO);
682 if (dest)
683 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
684 mep_mulr_source (insn, dest, src1, src2));
685 else
686 pattern = gen_mulsi3_lo (lo, src1, src2);
687 mep_rewrite_mult (insn, pattern);
690 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
691 SRC3 into $lo, then use either madd or maddr. The move into $lo will
692 be deleted by a peephole2 if SRC3 is already in $lo. */
694 static void
695 mep_rewrite_maddsi3 (rtx_insn *insn, rtx dest, rtx src1, rtx src2, rtx src3)
697 rtx lo, pattern;
699 lo = gen_rtx_REG (SImode, LO_REGNO);
700 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
701 if (dest)
702 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
703 mep_mulr_source (insn, dest, src1, src2),
704 copy_rtx (lo));
705 else
706 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
707 mep_rewrite_mult (insn, pattern);
710 /* Return true if $lo has the same value as integer register GPR when
711 instruction INSN is reached. If necessary, rewrite the instruction
712 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
713 rtx for (reg:SI LO_REGNO).
715 This function is intended to be used by the peephole2 pass. Since
716 that pass goes from the end of a basic block to the beginning, and
717 propagates liveness information on the way, there is no need to
718 update register notes here.
720 If GPR_DEAD_P is true on entry, and this function returns true,
721 then the caller will replace _every_ use of GPR in and after INSN
722 with LO. This means that if the instruction that sets $lo is a
723 mulr- or maddr-type instruction, we can rewrite it to use mul or
724 madd instead. In combination with the copy progagation pass,
725 this allows us to replace sequences like:
727 mov GPR,R1
728 mulr GPR,R2
730 with:
732 mul R1,R2
734 if GPR is no longer used. */
736 static bool
737 mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx_insn *insn, bool gpr_dead_p)
741 insn = PREV_INSN (insn);
742 if (INSN_P (insn))
743 switch (recog_memoized (insn))
745 case CODE_FOR_mulsi3_1:
746 extract_insn (insn);
747 if (rtx_equal_p (recog_data.operand[0], gpr))
749 mep_rewrite_mulsi3 (insn,
750 gpr_dead_p ? NULL : recog_data.operand[0],
751 recog_data.operand[1],
752 recog_data.operand[2]);
753 return true;
755 return false;
757 case CODE_FOR_maddsi3:
758 extract_insn (insn);
759 if (rtx_equal_p (recog_data.operand[0], gpr))
761 mep_rewrite_maddsi3 (insn,
762 gpr_dead_p ? NULL : recog_data.operand[0],
763 recog_data.operand[1],
764 recog_data.operand[2],
765 recog_data.operand[3]);
766 return true;
768 return false;
770 case CODE_FOR_mulsi3r:
771 case CODE_FOR_maddsi3r:
772 extract_insn (insn);
773 return rtx_equal_p (recog_data.operand[1], gpr);
775 default:
776 if (reg_set_p (lo, insn)
777 || reg_set_p (gpr, insn)
778 || volatile_insn_p (PATTERN (insn)))
779 return false;
781 if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
782 gpr_dead_p = false;
783 break;
786 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
787 return false;
790 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
792 bool
793 mep_reuse_lo_p (rtx lo, rtx gpr, rtx_insn *insn, bool gpr_dead_p)
795 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
796 extract_insn (insn);
797 return result;
800 /* Return true if SET can be turned into a post-modify load or store
801 that adds OFFSET to GPR. In other words, return true if SET can be
802 changed into:
804 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
806 It's OK to change SET to an equivalent operation in order to
807 make it match. */
809 static bool
810 mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
812 rtx *reg, *mem;
813 unsigned int reg_bytes, mem_bytes;
814 machine_mode reg_mode, mem_mode;
816 /* Only simple SETs can be converted. */
817 if (GET_CODE (set) != SET)
818 return false;
820 /* Point REG to what we hope will be the register side of the set and
821 MEM to what we hope will be the memory side. */
822 if (GET_CODE (SET_DEST (set)) == MEM)
824 mem = &SET_DEST (set);
825 reg = &SET_SRC (set);
827 else
829 reg = &SET_DEST (set);
830 mem = &SET_SRC (set);
831 if (GET_CODE (*mem) == SIGN_EXTEND)
832 mem = &XEXP (*mem, 0);
835 /* Check that *REG is a suitable coprocessor register. */
836 if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
837 return false;
839 /* Check that *MEM is a suitable memory reference. */
840 if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
841 return false;
843 /* Get the number of bytes in each operand. */
844 mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
845 reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));
847 /* Check that OFFSET is suitably aligned. */
848 if (INTVAL (offset) & (mem_bytes - 1))
849 return false;
851 /* Convert *MEM to a normal integer mode. */
852 mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
853 *mem = change_address (*mem, mem_mode, NULL);
855 /* Adjust *REG as well. */
856 *reg = shallow_copy_rtx (*reg);
857 if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
859 /* SET is a subword load. Convert it to an explicit extension. */
860 PUT_MODE (*reg, SImode);
861 *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
863 else
865 reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
866 PUT_MODE (*reg, reg_mode);
868 return true;
871 /* Return the effect of frame-related instruction INSN. */
873 static rtx
874 mep_frame_expr (rtx_insn *insn)
876 rtx note, expr;
878 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
879 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
880 RTX_FRAME_RELATED_P (expr) = 1;
881 return expr;
884 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
885 new pattern in INSN1; INSN2 will be deleted by the caller. */
887 static void
888 mep_make_parallel (rtx_insn *insn1, rtx_insn *insn2)
890 rtx expr;
892 if (RTX_FRAME_RELATED_P (insn2))
894 expr = mep_frame_expr (insn2);
895 if (RTX_FRAME_RELATED_P (insn1))
896 expr = gen_rtx_SEQUENCE (VOIDmode,
897 gen_rtvec (2, mep_frame_expr (insn1), expr));
898 set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
899 RTX_FRAME_RELATED_P (insn1) = 1;
902 PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
903 gen_rtvec (2, PATTERN (insn1),
904 PATTERN (insn2)));
905 INSN_CODE (insn1) = -1;
908 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
909 the basic block to see if any previous load or store instruction can
910 be persuaded to do SET_INSN as a side-effect. Return true if so. */
912 static bool
913 mep_use_post_modify_p_1 (rtx_insn *set_insn, rtx reg, rtx offset)
915 rtx_insn *insn;
917 insn = set_insn;
920 insn = PREV_INSN (insn);
921 if (INSN_P (insn))
923 if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
925 mep_make_parallel (insn, set_insn);
926 return true;
929 if (reg_set_p (reg, insn)
930 || reg_referenced_p (reg, PATTERN (insn))
931 || volatile_insn_p (PATTERN (insn)))
932 return false;
935 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
936 return false;
939 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
941 bool
942 mep_use_post_modify_p (rtx_insn *insn, rtx reg, rtx offset)
944 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
945 extract_insn (insn);
946 return result;
949 bool
950 mep_allow_clip (rtx ux, rtx lx, int s)
952 HOST_WIDE_INT u = INTVAL (ux);
953 HOST_WIDE_INT l = INTVAL (lx);
954 int i;
956 if (!TARGET_OPT_CLIP)
957 return false;
959 if (s)
961 for (i = 0; i < 30; i ++)
962 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
963 && (l == - ((HOST_WIDE_INT) 1 << i)))
964 return true;
966 else
968 if (l != 0)
969 return false;
971 for (i = 0; i < 30; i ++)
972 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
973 return true;
975 return false;
978 bool
979 mep_bit_position_p (rtx x, bool looking_for)
981 if (GET_CODE (x) != CONST_INT)
982 return false;
983 switch ((int) INTVAL(x) & 0xff)
985 case 0x01: case 0x02: case 0x04: case 0x08:
986 case 0x10: case 0x20: case 0x40: case 0x80:
987 return looking_for;
988 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
989 case 0xef: case 0xdf: case 0xbf: case 0x7f:
990 return !looking_for;
992 return false;
995 static bool
996 move_needs_splitting (rtx dest, rtx src,
997 machine_mode mode ATTRIBUTE_UNUSED)
999 int s = mep_section_tag (src);
1001 while (1)
1003 if (GET_CODE (src) == CONST
1004 || GET_CODE (src) == MEM)
1005 src = XEXP (src, 0);
1006 else if (GET_CODE (src) == SYMBOL_REF
1007 || GET_CODE (src) == LABEL_REF
1008 || GET_CODE (src) == PLUS)
1009 break;
1010 else
1011 return false;
1013 if (s == 'f'
1014 || (GET_CODE (src) == PLUS
1015 && GET_CODE (XEXP (src, 1)) == CONST_INT
1016 && (INTVAL (XEXP (src, 1)) < -65536
1017 || INTVAL (XEXP (src, 1)) > 0xffffff))
1018 || (GET_CODE (dest) == REG
1019 && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
1020 return true;
1021 return false;
1024 bool
1025 mep_split_mov (rtx *operands, int symbolic)
1027 if (symbolic)
1029 if (move_needs_splitting (operands[0], operands[1], SImode))
1030 return true;
1031 return false;
1034 if (GET_CODE (operands[1]) != CONST_INT)
1035 return false;
1037 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1038 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1039 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1040 return false;
1042 if (((!reload_completed && !reload_in_progress)
1043 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1044 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1045 return false;
1047 return true;
1050 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1051 it to one specific value. So the insn chosen depends on whether
1052 the source and destination modes match. */
1054 bool
1055 mep_vliw_mode_match (rtx tgt)
1057 bool src_vliw = mep_vliw_function_p (cfun->decl);
1058 bool tgt_vliw = INTVAL (tgt);
1060 return src_vliw == tgt_vliw;
1063 /* Like the above, but also test for near/far mismatches. */
1065 bool
1066 mep_vliw_jmp_match (rtx tgt)
1068 bool src_vliw = mep_vliw_function_p (cfun->decl);
1069 bool tgt_vliw = INTVAL (tgt);
1071 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1072 return false;
1074 return src_vliw == tgt_vliw;
1077 bool
1078 mep_multi_slot (rtx_insn *x)
1080 return get_attr_slot (x) == SLOT_MULTI;
1083 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1085 static bool
1086 mep_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1088 /* We can't convert symbol values to gp- or tp-rel values after
1089 reload, as reload might have used $gp or $tp for other
1090 purposes. */
1091 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1093 char e = mep_section_tag (x);
1094 return (e != 't' && e != 'b');
1096 return 1;
1099 /* Be careful not to use macros that need to be compiled one way for
1100 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
/* Implements TARGET_LEGITIMATE_ADDRESS_P: return true if X is a valid
   address for an access of mode MODE.  STRICT is non-zero during and
   after reload, when only hard registers are acceptable as bases
   (GEN_REG applies the strict/non-strict distinction).  */
1102 bool
1103 mep_legitimate_address (machine_mode mode, rtx x, int strict)
1105 int the_tag;
1107 #define DEBUG_LEGIT 0
1108 #if DEBUG_LEGIT
1109 fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
1110 debug_rtx (x);
1111 #endif
/* %lo(sym)[reg] is valid only for accesses of at most 4 bytes; wider
   accesses get split and a LO_SUM address is not offsettable here.  */
1113 if (GET_CODE (x) == LO_SUM
1114 && GET_CODE (XEXP (x, 0)) == REG
1115 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1116 && CONSTANT_P (XEXP (x, 1)))
1118 if (GET_MODE_SIZE (mode) > 4)
1120 /* We will end up splitting this, and lo_sums are not
1121 offsettable for us. */
1122 #if DEBUG_LEGIT
1123 fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
1124 #endif
1125 return false;
1127 #if DEBUG_LEGIT
1128 fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
1129 #endif
1130 return true;
/* Plain register indirect: [reg].  */
1133 if (GET_CODE (x) == REG
1134 && GEN_REG (REGNO (x), strict))
1136 #if DEBUG_LEGIT
1137 fprintf (stderr, " - yup, [reg]\n");
1138 #endif
1139 return true;
/* Base plus 16-bit signed displacement: [reg + const].  */
1142 if (GET_CODE (x) == PLUS
1143 && GET_CODE (XEXP (x, 0)) == REG
1144 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1145 && const_in_range (XEXP (x, 1), -32768, 32767))
1147 #if DEBUG_LEGIT
1148 fprintf (stderr, " - yup, [reg+const]\n")
1149 #endif
1150 return true;
/* Base plus a %tprel/%gprel-style UNSPEC (optionally UNSPEC+const).  */
1153 if (GET_CODE (x) == PLUS
1154 && GET_CODE (XEXP (x, 0)) == REG
1155 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1156 && GET_CODE (XEXP (x, 1)) == CONST
1157 && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
1158 || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
1159 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
1160 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
1162 #if DEBUG_LEGIT
1163 fprintf (stderr, " - yup, [reg+unspec]\n");
1164 #endif
1165 return true;
/* Symbols in far ('f') sections are never directly addressable.  */
1168 the_tag = mep_section_tag (x);
1170 if (the_tag == 'f')
1172 #if DEBUG_LEGIT
1173 fprintf (stderr, " - nope, [far]\n");
1174 #endif
1175 return false;
/* A bare SYMBOL_REF with VOIDmode is a call target.  */
1178 if (mode == VOIDmode
1179 && GET_CODE (x) == SYMBOL_REF)
1181 #if DEBUG_LEGIT
1182 fprintf (stderr, " - yup, call [symbol]\n");
1183 #endif
1184 return true;
/* Absolute constant addresses are allowed for word-sized accesses
   only, and CONST_INTs must be small, non-negative, and aligned.  */
1187 if ((mode == SImode || mode == SFmode)
1188 && CONSTANT_P (x)
1189 && mep_legitimate_constant_p (mode, x)
1190 && the_tag != 't' && the_tag != 'b')
1192 if (GET_CODE (x) != CONST_INT
1193 || (INTVAL (x) <= 0xfffff
1194 && INTVAL (x) >= 0
1195 && (INTVAL (x) % 4) == 0))
1197 #if DEBUG_LEGIT
1198 fprintf (stderr, " - yup, [const]\n");
1199 #endif
1200 return true;
1204 #if DEBUG_LEGIT
1205 fprintf (stderr, " - nope.\n");
1206 #endif
1207 return false;
/* Implements LEGITIMIZE_RELOAD_ADDRESS.  Returns non-zero if a reload
   was pushed for *X (so the caller need not process it further),
   zero to let the generic reload machinery handle it.  */
1211 mep_legitimize_reload_address (rtx *x, machine_mode mode, int opnum,
1212 int type_i,
1213 int ind_levels ATTRIBUTE_UNUSED)
1215 enum reload_type type = (enum reload_type) type_i;
/* (mem + reg): reload the whole expression into a register rather
   than letting GCC copy just the MEM, which would still be invalid.  */
1217 if (GET_CODE (*x) == PLUS
1218 && GET_CODE (XEXP (*x, 0)) == MEM
1219 && GET_CODE (XEXP (*x, 1)) == REG)
1221 /* GCC will by default copy the MEM into a REG, which results in
1222 an invalid address. For us, the best thing to do is move the
1223 whole expression to a REG. */
1224 push_reload (*x, NULL_RTX, x, NULL,
1225 GENERAL_REGS, mode, VOIDmode,
1226 0, 0, opnum, type);
1227 return 1;
/* (sym + const) where SYM is not tiny/based ('t'/'b'): reload the
   symbol into a register, since reg+int addressing works.  */
1230 if (GET_CODE (*x) == PLUS
1231 && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
1232 && GET_CODE (XEXP (*x, 1)) == CONST_INT)
1234 char e = mep_section_tag (XEXP (*x, 0));
1236 if (e != 't' && e != 'b')
1238 /* GCC thinks that (sym+const) is a valid address. Well,
1239 sometimes it is, this time it isn't. The best thing to
1240 do is reload the symbol to a register, since reg+int
1241 tends to work, and we can't just add the symbol and
1242 constant anyway. */
1243 push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
1244 GENERAL_REGS, mode, VOIDmode,
1245 0, 0, opnum, type);
1246 return 1;
1249 return 0;
/* Return the instruction length in bytes (2 for short form, 4 for
   long form) of the core load/store INSN whose memory operand is
   operand OPN of its single_set.  Short forms exist for [reg],
   aligned small SP offsets, and small TP-relative offsets with a
   low-numbered data register.  */
1253 mep_core_address_length (rtx_insn *insn, int opn)
1255 rtx set = single_set (insn);
1256 rtx mem = XEXP (set, opn);
1257 rtx other = XEXP (set, 1-opn);
1258 rtx addr = XEXP (mem, 0);
1260 if (register_operand (addr, Pmode))
1261 return 2;
1262 if (GET_CODE (addr) == PLUS)
1264 rtx addend = XEXP (addr, 1);
1266 gcc_assert (REG_P (XEXP (addr, 0)));
1268 switch (REGNO (XEXP (addr, 0)))
1270 case STACK_POINTER_REGNUM:
/* SP-relative short form: word access with an imm7.align4 offset.  */
1271 if (GET_MODE_SIZE (GET_MODE (mem)) == 4
1272 && mep_imm7a4_operand (addend, VOIDmode))
1273 return 2;
1274 break;
1276 case 13: /* TP */
1277 gcc_assert (REG_P (other));
/* TP-relative short forms only encode registers $0..$7.  */
1279 if (REGNO (other) >= 8)
1280 break;
1282 if (GET_CODE (addend) == CONST
1283 && GET_CODE (XEXP (addend, 0)) == UNSPEC
1284 && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
1285 return 2;
/* Or a small, naturally-aligned literal offset (0..127).  */
1287 if (GET_CODE (addend) == CONST_INT
1288 && INTVAL (addend) >= 0
1289 && INTVAL (addend) <= 127
1290 && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
1291 return 2;
1292 break;
1296 return 4;
/* Like mep_core_address_length, but for coprocessor load/store INSNs:
   2 bytes for register-indirect or post-increment addressing (or a
   non-MEM operand), 4 bytes otherwise.  */
1300 mep_cop_address_length (rtx_insn *insn, int opn)
1302 rtx set = single_set (insn);
1303 rtx mem = XEXP (set, opn);
1304 rtx addr = XEXP (mem, 0);
1306 if (GET_CODE (mem) != MEM)
1307 return 2;
1308 if (register_operand (addr, Pmode))
1309 return 2;
1310 if (GET_CODE (addr) == POST_INC)
1311 return 2;
1313 return 4;
1316 #define DEBUG_EXPAND_MOV 0
/* Expander for the mov<mode> patterns.  Rewrites tiny ('t') and
   based ('b') section references as $gp/$tp-relative UNSPEC
   addresses, forces far ('f') and oversized symbolic destinations
   through a scratch register, and expands far symbolic sources via
   movh/add3 (top/bot) pairs.  Returns true if it emitted the whole
   move itself, false to let the pattern's default code run.  */
1317 bool
1318 mep_expand_mov (rtx *operands, machine_mode mode)
1320 int i, t;
1321 int tag[2];
1322 rtx tpsym, tpoffs;
1323 int post_reload = 0;
1325 tag[0] = mep_section_tag (operands[0]);
1326 tag[1] = mep_section_tag (operands[1]);
/* Before reload, never allow a mem-to-mem (or const-to-mem) move;
   force the source into a register first.  */
1328 if (!reload_in_progress
1329 && !reload_completed
1330 && GET_CODE (operands[0]) != REG
1331 && GET_CODE (operands[0]) != SUBREG
1332 && GET_CODE (operands[1]) != REG
1333 && GET_CODE (operands[1]) != SUBREG)
1334 operands[1] = copy_to_mode_reg (mode, operands[1]);
1336 #if DEBUG_EXPAND_MOV
1337 fprintf(stderr, "expand move %s %d\n", mode_name[mode],
1338 reload_in_progress || reload_completed);
1339 debug_rtx (operands[0]);
1340 debug_rtx (operands[1]);
1341 #endif
1343 if (mode == DImode || mode == DFmode)
1344 return false;
1346 if (reload_in_progress || reload_completed)
1348 rtx r;
1350 if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
1351 cfun->machine->reload_changes_tp = true;
/* During/after reload we can only use the gp/tp-relative forms if
   the prologue-provided initial $gp/$tp values are still live.  */
1353 if (tag[0] == 't' || tag[1] == 't')
1355 r = has_hard_reg_initial_val (Pmode, GP_REGNO);
1356 if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
1357 post_reload = 1;
1359 if (tag[0] == 'b' || tag[1] == 'b')
1361 r = has_hard_reg_initial_val (Pmode, TP_REGNO);
1362 if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
1363 post_reload = 1;
1365 if (cfun->machine->reload_changes_tp == true)
1366 post_reload = 1;
1369 if (!post_reload)
1371 rtx n;
/* Loading the address of a tiny/based symbol: emit
   dest = $gp/$tp + %gprel/%tprel(sym) [+ offset].  */
1372 if (symbol_p (operands[1]))
1374 t = mep_section_tag (operands[1]);
1375 if (t == 'b' || t == 't')
1378 if (GET_CODE (operands[1]) == SYMBOL_REF)
1380 tpsym = operands[1];
1381 n = gen_rtx_UNSPEC (mode,
1382 gen_rtvec (1, operands[1]),
1383 t == 'b' ? UNS_TPREL : UNS_GPREL);
1384 n = gen_rtx_CONST (mode, n);
1386 else if (GET_CODE (operands[1]) == CONST
1387 && GET_CODE (XEXP (operands[1], 0)) == PLUS
1388 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
1389 && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
1391 tpsym = XEXP (XEXP (operands[1], 0), 0);
1392 tpoffs = XEXP (XEXP (operands[1], 0), 1);
1393 n = gen_rtx_UNSPEC (mode,
1394 gen_rtvec (1, tpsym),
1395 t == 'b' ? UNS_TPREL : UNS_GPREL);
1396 n = gen_rtx_PLUS (mode, n, tpoffs);
1397 n = gen_rtx_CONST (mode, n);
1399 else if (GET_CODE (operands[1]) == CONST
1400 && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
1401 return false;
1402 else
1404 error ("unusual TP-relative address");
1405 return false;
1408 n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
1409 : mep_gp_rtx ()), n);
1410 n = emit_insn (gen_rtx_SET (operands[0], n));
1411 #if DEBUG_EXPAND_MOV
1412 fprintf(stderr, "mep_expand_mov emitting ");
1413 debug_rtx(n);
1414 #endif
1415 return true;
/* Rewrite tiny/based MEM operands in place to use an explicit
   $gp/$tp + UNSPEC address.  */
1419 for (i=0; i < 2; i++)
1421 t = mep_section_tag (operands[i]);
1422 if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
1424 rtx sym, n, r;
1425 int u;
1427 sym = XEXP (operands[i], 0);
1428 if (GET_CODE (sym) == CONST
1429 && GET_CODE (XEXP (sym, 0)) == UNSPEC)
1430 sym = XVECEXP (XEXP (sym, 0), 0, 0);
1432 if (t == 'b')
1434 r = mep_tp_rtx ();
1435 u = UNS_TPREL;
1437 else
1439 r = mep_gp_rtx ();
1440 u = UNS_GPREL;
1443 n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
1444 n = gen_rtx_CONST (Pmode, n);
1445 n = gen_rtx_PLUS (Pmode, r, n);
1446 operands[i] = replace_equiv_address (operands[i], n);
/* Control-register moves can't have a memory operand on the other
   side; force the source into a general register first.  */
1451 if ((GET_CODE (operands[1]) != REG
1452 && MEP_CONTROL_REG (operands[0]))
1453 || (GET_CODE (operands[0]) != REG
1454 && MEP_CONTROL_REG (operands[1])))
1456 rtx temp;
1457 #if DEBUG_EXPAND_MOV
1458 fprintf (stderr, "cr-mem, forcing op1 to reg\n");
1459 #endif
1460 temp = gen_reg_rtx (mode);
1461 emit_move_insn (temp, operands[1]);
1462 operands[1] = temp;
/* Stores to far symbols, or non-word-sized symbolic stores, need the
   address in a register.  */
1465 if (symbolref_p (operands[0])
1466 && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
1467 || (GET_MODE_SIZE (mode) != 4)))
1469 rtx temp;
1471 gcc_assert (!reload_in_progress && !reload_completed);
1473 temp = force_reg (Pmode, XEXP (operands[0], 0));
1474 operands[0] = replace_equiv_address (operands[0], temp);
1475 emit_move_insn (operands[0], operands[1]);
1476 return true;
1479 if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
1480 tag[1] = 0;
/* Far (or post-reload tiny/based) symbolic sources: build the full
   32-bit address with a movh/add3 top/bottom pair.  */
1482 if (symbol_p (operands[1])
1483 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1485 emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
1486 emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
1487 return true;
1490 if (symbolref_p (operands[1])
1491 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1493 rtx temp;
/* During reload we may not create new pseudos; reuse the dest.  */
1495 if (reload_in_progress || reload_completed)
1496 temp = operands[0];
1497 else
1498 temp = gen_reg_rtx (Pmode);
1500 emit_insn (gen_movsi_topsym_s (temp, operands[1]));
1501 emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
1502 emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
1503 return true;
1506 return false;
1509 /* Cases where the pattern can't be made to use at all. */
/* Predicate used by the mov patterns: return false for operand
   combinations that no alternative can handle (HIGH sources, far
   symbols, pre-reload tiny/based symbols, and moves with no general
   register on either side).  */
1511 bool
1512 mep_mov_ok (rtx *operands, machine_mode mode ATTRIBUTE_UNUSED)
1514 int i;
1516 #define DEBUG_MOV_OK 0
1517 #if DEBUG_MOV_OK
1518 fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
1519 mep_section_tag (operands[1]));
1520 debug_rtx (operands[0]);
1521 debug_rtx (operands[1]);
1522 #endif
1524 /* We want the movh patterns to get these. */
1525 if (GET_CODE (operands[1]) == HIGH)
1526 return false;
1528 /* We can't store a register to a far variable without using a
1529 scratch register to hold the address. Using far variables should
1530 be split by mep_emit_mov anyway. */
1531 if (mep_section_tag (operands[0]) == 'f'
1532 || mep_section_tag (operands[1]) == 'f')
1534 #if DEBUG_MOV_OK
1535 fprintf (stderr, " - no, f\n");
1536 #endif
1537 return false;
1539 i = mep_section_tag (operands[1]);
1540 if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
1541 /* These are supposed to be generated with adds of the appropriate
1542 register. During and after reload, however, we allow them to
1543 be accessed as normal symbols because adding a dependency on
1544 the base register now might cause problems. */
1546 #if DEBUG_MOV_OK
1547 fprintf (stderr, " - no, bt\n");
1548 #endif
1549 return false;
1552 /* The only moves we can allow involve at least one general
1553 register, so require it. */
1554 for (i = 0; i < 2; i ++)
1556 /* Allow subregs too, before reload. */
1557 rtx x = operands[i];
1559 if (GET_CODE (x) == SUBREG)
1560 x = XEXP (x, 0);
1561 if (GET_CODE (x) == REG
1562 && ! MEP_CONTROL_REG (x))
1564 #if DEBUG_MOV_OK
1565 fprintf (stderr, " - ok\n");
1566 #endif
1567 return true;
1570 #if DEBUG_MOV_OK
1571 fprintf (stderr, " - no, no gen reg\n");
1572 #endif
1573 return false;
1576 #define DEBUG_SPLIT_WIDE_MOVE 0
/* Split a double-word (DImode/DFmode) move: from operands[0]/[1],
   compute the four single-word halves into operands[2]..[5]
   (hi-dst, hi-src, lo-dst, lo-src), swapping the two sub-moves when
   the first would clobber a register the second still needs.  */
1577 void
1578 mep_split_wide_move (rtx *operands, machine_mode mode)
1580 int i;
1582 #if DEBUG_SPLIT_WIDE_MOVE
1583 fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
1584 debug_rtx (operands[0]);
1585 debug_rtx (operands[1]);
1586 #endif
1588 for (i = 0; i <= 1; i++)
1590 rtx op = operands[i], hi, lo;
1592 switch (GET_CODE (op))
1594 case REG:
1596 unsigned int regno = REGNO (op);
/* A 64-bit coprocessor register holds both halves itself: the low
   word is the SImode subreg, the high word a ZERO_EXTRACT of the
   top 32 bits.  */
1598 if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
1600 rtx i32;
1602 lo = gen_rtx_REG (SImode, regno);
1603 i32 = GEN_INT (32);
1604 hi = gen_rtx_ZERO_EXTRACT (SImode,
1605 gen_rtx_REG (DImode, regno),
1606 i32, i32);
/* Otherwise a register pair; endianness decides which regno holds
   the high word.  */
1608 else
1610 hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
1611 lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
1614 break;
1616 case CONST_INT:
1617 case CONST_DOUBLE:
1618 case MEM:
1619 hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
1620 lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
1621 break;
1623 default:
1624 gcc_unreachable ();
1627 /* The high part of CR <- GPR moves must be done after the low part. */
1628 operands [i + 4] = lo;
1629 operands [i + 2] = hi;
1632 if (reg_mentioned_p (operands[2], operands[5])
1633 || GET_CODE (operands[2]) == ZERO_EXTRACT
1634 || GET_CODE (operands[4]) == ZERO_EXTRACT)
1636 rtx tmp;
1638 /* Overlapping register pairs -- make sure we don't
1639 early-clobber ourselves. */
1640 tmp = operands[2];
1641 operands[2] = operands[4];
1642 operands[4] = tmp;
1643 tmp = operands[3];
1644 operands[3] = operands[5];
1645 operands[5] = tmp;
1648 #if DEBUG_SPLIT_WIDE_MOVE
1649 fprintf(stderr, "\033[34m");
1650 debug_rtx (operands[2]);
1651 debug_rtx (operands[3]);
1652 debug_rtx (operands[4]);
1653 debug_rtx (operands[5]);
1654 fprintf(stderr, "\033[0m");
1655 #endif
1658 /* Emit a setcc instruction in its entirity. */
1660 static bool
1661 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1663 rtx tmp;
1665 switch (code)
1667 case GT:
1668 case GTU:
1669 tmp = op1, op1 = op2, op2 = tmp;
1670 code = swap_condition (code);
1671 /* FALLTHRU */
1673 case LT:
1674 case LTU:
1675 op1 = force_reg (SImode, op1);
1676 emit_insn (gen_rtx_SET (dest, gen_rtx_fmt_ee (code, SImode, op1, op2)));
1677 return true;
1679 case EQ:
1680 if (op2 != const0_rtx)
1681 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1682 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1683 return true;
1685 case NE:
1686 /* Branchful sequence:
1687 mov dest, 0 16-bit
1688 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1689 mov dest, 1 16-bit
1691 Branchless sequence:
1692 add3 tmp, op1, -op2 32-bit (or mov + sub)
1693 sltu3 tmp, tmp, 1 16-bit
1694 xor3 dest, tmp, 1 32-bit
1696 if (optimize_size && op2 != const0_rtx)
1697 return false;
1699 if (op2 != const0_rtx)
1700 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1702 op2 = gen_reg_rtx (SImode);
1703 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1705 emit_insn (gen_rtx_SET (dest, gen_rtx_XOR (SImode, op2, const1_rtx)));
1706 return true;
1708 case LE:
1709 if (GET_CODE (op2) != CONST_INT
1710 || INTVAL (op2) == 0x7ffffff)
1711 return false;
1712 op2 = GEN_INT (INTVAL (op2) + 1);
1713 return mep_expand_setcc_1 (LT, dest, op1, op2);
1715 case LEU:
1716 if (GET_CODE (op2) != CONST_INT
1717 || INTVAL (op2) == -1)
1718 return false;
1719 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1720 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1722 case GE:
1723 if (GET_CODE (op2) != CONST_INT
1724 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1725 return false;
1726 op2 = GEN_INT (INTVAL (op2) - 1);
1727 return mep_expand_setcc_1 (GT, dest, op1, op2);
1729 case GEU:
1730 if (GET_CODE (op2) != CONST_INT
1731 || op2 == const0_rtx)
1732 return false;
1733 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1734 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1736 default:
1737 gcc_unreachable ();
1741 bool
1742 mep_expand_setcc (rtx *operands)
1744 rtx dest = operands[0];
1745 enum rtx_code code = GET_CODE (operands[1]);
1746 rtx op0 = operands[2];
1747 rtx op1 = operands[3];
1749 return mep_expand_setcc_1 (code, dest, op0, op1);
/* Expander for conditional branches: operands[0] holds the comparison
   code, operands[1]/[2] the compared values.  Rewrites comparisons the
   hardware cannot branch on directly into a setcc into a temporary
   followed by an EQ/NE-with-zero branch, and returns the RTX for the
   final branch condition.  */
1753 mep_expand_cbranch (rtx *operands)
1755 enum rtx_code code = GET_CODE (operands[0]);
1756 rtx op0 = operands[1];
1757 rtx op1 = operands[2];
1758 rtx tmp;
1760 restart:
1761 switch (code)
1763 case LT:
/* blti only takes a 4-bit immediate; otherwise compute the setcc and
   branch on the result being non-zero.  */
1764 if (mep_imm4_operand (op1, SImode))
1765 break;
1767 tmp = gen_reg_rtx (SImode);
1768 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1769 code = NE;
1770 op0 = tmp;
1771 op1 = const0_rtx;
1772 break;
1774 case GE:
/* GE is the inverse of LT: branch on the LT setcc being zero.  */
1775 if (mep_imm4_operand (op1, SImode))
1776 break;
1778 tmp = gen_reg_rtx (SImode);
1779 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1781 code = EQ;
1782 op0 = tmp;
1783 op1 = const0_rtx;
1784 break;
1786 case EQ:
1787 case NE:
1788 if (! mep_reg_or_imm4_operand (op1, SImode))
1789 op1 = force_reg (SImode, op1);
1790 break;
1792 case LE:
1793 case GT:
/* Adjust the constant by one to turn LE/GT into LT/GE, unless that
   would overflow INT_MAX.  */
1794 if (GET_CODE (op1) == CONST_INT
1795 && INTVAL (op1) != 0x7fffffff)
1797 op1 = GEN_INT (INTVAL (op1) + 1);
1798 code = (code == LE ? LT : GE);
1799 goto restart;
/* Otherwise compare the operands the other way round.  */
1802 tmp = gen_reg_rtx (SImode);
1803 gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));
1805 code = (code == LE ? EQ : NE);
1806 op0 = tmp;
1807 op1 = const0_rtx;
1808 break;
1810 case LTU:
/* (x <u 1) is just (x == 0).  */
1811 if (op1 == const1_rtx)
1813 code = EQ;
1814 op1 = const0_rtx;
1815 break;
1818 tmp = gen_reg_rtx (SImode);
1819 gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
1820 code = NE;
1821 op0 = tmp;
1822 op1 = const0_rtx;
1823 break;
1825 case LEU:
/* Try LEU directly; if the constant form fails, use the swapped LTU
   and branch on it being zero.  */
1826 tmp = gen_reg_rtx (SImode);
1827 if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
1828 code = NE;
1829 else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
1830 code = EQ;
1831 else
1832 gcc_unreachable ();
1833 op0 = tmp;
1834 op1 = const0_rtx;
1835 break;
1837 case GTU:
1838 tmp = gen_reg_rtx (SImode);
1839 gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
1840 || mep_expand_setcc_1 (LTU, tmp, op1, op0));
1841 code = NE;
1842 op0 = tmp;
1843 op1 = const0_rtx;
1844 break;
1846 case GEU:
1847 tmp = gen_reg_rtx (SImode);
1848 if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
1849 code = NE;
1850 else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
1851 code = EQ;
1852 else
1853 gcc_unreachable ();
1854 op0 = tmp;
1855 op1 = const0_rtx;
1856 break;
1858 default:
1859 gcc_unreachable ();
1862 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
1865 const char *
1866 mep_emit_cbranch (rtx *operands, int ne)
1868 if (GET_CODE (operands[1]) == REG)
1869 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1870 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
1871 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1872 else
1873 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
/* Expander for call patterns.  RETURNS_VALUE is 1 for call_value
   (operands shifted by one because operand 0 is the result).  Forces
   an illegitimate call address into a register, defaults the unused
   operand to 0, and emits the internal call pattern with explicit
   $tp/$gp uses so their initial values stay live across the call.  */
1877 mep_expand_call (rtx *operands, int returns_value)
1879 rtx addr = operands[returns_value];
1880 rtx tp = mep_tp_rtx ();
1881 rtx gp = mep_gp_rtx ();
1883 gcc_assert (GET_CODE (addr) == MEM);
1885 addr = XEXP (addr, 0);
1887 if (! mep_call_address_operand (addr, VOIDmode))
1888 addr = force_reg (SImode, addr);
1890 if (! operands[returns_value+2])
1891 operands[returns_value+2] = const0_rtx;
1893 if (returns_value)
1894 emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
1895 operands[3], tp, gp));
1896 else
1897 emit_call_insn (gen_call_internal (addr, operands[1],
1898 operands[2], tp, gp));
1901 /* Aliasing Support. */
1903 /* If X is a machine specific address (i.e. a symbol or label being
1904 referenced as a displacement from the GOT implemented using an
1905 UNSPEC), then return the base term. Otherwise return X. */
/* Implements FIND_BASE_TERM: peel ($tp + %tprel(sym)) and
   ($gp + %gprel(sym)) addresses down to the underlying symbol so the
   alias analyzer can see the real base object.  */
1908 mep_find_base_term (rtx x)
1910 rtx base, term;
1911 int unspec;
1913 if (GET_CODE (x) != PLUS)
1914 return x;
1915 base = XEXP (x, 0);
1916 term = XEXP (x, 1);
/* The base must be the live initial value of $tp or $gp; that
   determines which UNSPEC we expect in the displacement.  */
1918 if (has_hard_reg_initial_val(Pmode, TP_REGNO)
1919 && base == mep_tp_rtx ())
1920 unspec = UNS_TPREL;
1921 else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
1922 && base == mep_gp_rtx ())
1923 unspec = UNS_GPREL;
1924 else
1925 return x;
1927 if (GET_CODE (term) != CONST)
1928 return x;
1929 term = XEXP (term, 0);
1931 if (GET_CODE (term) != UNSPEC
1932 || XINT (term, 1) != unspec)
1933 return x;
/* Return the SYMBOL_REF wrapped inside the UNSPEC.  */
1935 return XVECEXP (term, 0, 0)
1938 /* Reload Support. */
1940 /* Return true if the registers in CLASS cannot represent the change from
1941 modes FROM to TO. */
1943 bool
1944 mep_cannot_change_mode_class (machine_mode from, machine_mode to,
1945 enum reg_class regclass)
1947 if (from == to)
1948 return false;
1950 /* 64-bit COP regs must remain 64-bit COP regs. */
1951 if (TARGET_64BIT_CR_REGS
1952 && (regclass == CR_REGS
1953 || regclass == LOADABLE_CR_REGS)
1954 && (GET_MODE_SIZE (to) < 8
1955 || GET_MODE_SIZE (from) < 8))
1956 return true;
1958 return false;
/* True when class C contains registers outside GENERAL_REGS.  */
1961 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
/* True if X (stripping SUBREGs) is a hard general-purpose register.  */
1963 static bool
1964 mep_general_reg (rtx x)
1966 while (GET_CODE (x) == SUBREG)
1967 x = XEXP (x, 0);
1968 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
/* True if X (stripping SUBREGs) is a hard register that is NOT a
   general-purpose register (coprocessor or control register).  */
1971 static bool
1972 mep_nongeneral_reg (rtx x)
1974 while (GET_CODE (x) == SUBREG)
1975 x = XEXP (x, 0);
1976 return (GET_CODE (x) == REG
1977 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
/* True if X (stripping SUBREGs) is a coprocessor data register.  */
1980 static bool
1981 mep_general_copro_reg (rtx x)
1983 while (GET_CODE (x) == SUBREG)
1984 x = XEXP (x, 0);
1985 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
/* True if X (stripping SUBREGs) is not a hard register at all
   (a pseudo, memory, constant, ...).  */
1988 static bool
1989 mep_nonregister (rtx x)
1991 while (GET_CODE (x) == SUBREG)
1992 x = XEXP (x, 0);
1993 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
1996 #define DEBUG_RELOAD 0
1998 /* Return the secondary reload class needed for moving value X to or
1999 from a register in coprocessor register class CLASS. */
2001 static enum reg_class
2002 mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
2004 if (mep_general_reg (x))
2005 /* We can do the move directly if mep_have_core_copro_moves_p,
2006 otherwise we need to go through memory. Either way, no secondary
2007 register is needed. */
2008 return NO_REGS;
2010 if (mep_general_copro_reg (x))
2012 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2013 if (mep_have_copro_copro_moves_p)
2014 return NO_REGS;
2016 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2017 if (mep_have_core_copro_moves_p)
2018 return GENERAL_REGS;
2020 /* Otherwise we need to do it through memory. No secondary
2021 register is needed. */
2022 return NO_REGS;
2025 if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
2026 && constraint_satisfied_p (x, CONSTRAINT_U))
2027 /* X is a memory value that we can access directly. */
2028 return NO_REGS;
2030 /* We have to move X into a GPR first and then copy it to
2031 the coprocessor register. The move from the GPR to the
2032 coprocessor might be done directly or through memory,
2033 depending on mep_have_core_copro_moves_p. */
2034 return GENERAL_REGS;
2037 /* Copying X to register in RCLASS. */
/* Implements SECONDARY_INPUT_RELOAD_CLASS: coprocessor classes defer
   to mep_secondary_copro_reload_class; any other non-general class
   needs a GENERAL_REGS intermediary unless X is already a GPR.  */
2039 enum reg_class
2040 mep_secondary_input_reload_class (enum reg_class rclass,
2041 machine_mode mode ATTRIBUTE_UNUSED,
2042 rtx x)
2044 int rv = NO_REGS;
2046 #if DEBUG_RELOAD
2047 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2048 debug_rtx (x);
2049 #endif
2051 if (reg_class_subset_p (rclass, CR_REGS))
2052 rv = mep_secondary_copro_reload_class (rclass, x);
2053 else if (MEP_NONGENERAL_CLASS (rclass)
2054 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2055 rv = GENERAL_REGS;
2057 #if DEBUG_RELOAD
2058 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2059 #endif
2060 return (enum reg_class) rv;
2063 /* Copying register in RCLASS to X. */
/* Implements SECONDARY_OUTPUT_RELOAD_CLASS; mirror image of
   mep_secondary_input_reload_class.  */
2065 enum reg_class
2066 mep_secondary_output_reload_class (enum reg_class rclass,
2067 machine_mode mode ATTRIBUTE_UNUSED,
2068 rtx x)
2070 int rv = NO_REGS;
2072 #if DEBUG_RELOAD
2073 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2074 debug_rtx (x);
2075 #endif
2077 if (reg_class_subset_p (rclass, CR_REGS))
2078 rv = mep_secondary_copro_reload_class (rclass, x);
2079 else if (MEP_NONGENERAL_CLASS (rclass)
2080 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2081 rv = GENERAL_REGS;
2083 #if DEBUG_RELOAD
2084 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2085 #endif
2087 return (enum reg_class) rv;
2090 /* Implement SECONDARY_MEMORY_NEEDED. */
/* A stack slot is needed for GPR<->CR moves when direct core/copro
   moves are unavailable, and for CR<->CR moves when neither direct
   copro-copro nor core-copro moves exist.  */
2092 bool
2093 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2094 machine_mode mode ATTRIBUTE_UNUSED)
2096 if (!mep_have_core_copro_moves_p)
2098 if (reg_classes_intersect_p (rclass1, CR_REGS)
2099 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2100 return true;
2101 if (reg_classes_intersect_p (rclass2, CR_REGS)
2102 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2103 return true;
2104 if (!mep_have_copro_copro_moves_p
2105 && reg_classes_intersect_p (rclass1, CR_REGS)
2106 && reg_classes_intersect_p (rclass2, CR_REGS))
2107 return true;
2109 return false;
/* Expander for the reload_in/reload_out patterns.  operands[0] is the
   destination, operands[1] the source, operands[2] the scratch
   register reload allocated.  WHICH encodes the case as a two-digit
   decimal number: tens digit for the destination (2 = far symbol,
   1 = non-general register), units digit likewise for the source.  */
2113 mep_expand_reload (rtx *operands, machine_mode mode)
2115 /* There are three cases for each direction:
2116 register, farsym
2117 control, farsym
2118 control, nearsym */
2120 int s0 = mep_section_tag (operands[0]) == 'f';
2121 int s1 = mep_section_tag (operands[1]) == 'f';
2122 int c0 = mep_nongeneral_reg (operands[0]);
2123 int c1 = mep_nongeneral_reg (operands[1]);
2124 int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);
2126 #if DEBUG_RELOAD
2127 fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
2128 debug_rtx (operands[0]);
2129 debug_rtx (operands[1]);
2130 #endif
2132 switch (which)
2134 case 00: /* Don't know why this gets here. */
2135 case 02: /* general = far */
2136 emit_move_insn (operands[0], operands[1]);
2137 return;
/* Moves involving a control/coprocessor register go through the
   general-register scratch.  */
2139 case 10: /* cr = mem */
2140 case 11: /* cr = cr */
2141 case 01: /* mem = cr */
2142 case 12: /* cr = far */
2143 emit_move_insn (operands[2], operands[1]);
2144 emit_move_insn (operands[0], operands[2]);
2145 return;
/* Storing to a far symbol: load its address into the scratch and
   store through it.  */
2147 case 20: /* far = general */
2148 emit_move_insn (operands[2], XEXP (operands[1], 0));
2149 emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
2150 return;
2152 case 21: /* far = cr */
2153 case 22: /* far = far */
2154 default:
2155 fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
2156 which, mode_name[mode]);
2157 debug_rtx (operands[0]);
2158 debug_rtx (operands[1]);
2159 gcc_unreachable ();
2163 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2164 can be moved directly into registers 0 to 7, but not into the rest.
2165 If so, and if the required class includes registers 0 to 7, restrict
2166 it to those registers. */
2168 enum reg_class
2169 mep_preferred_reload_class (rtx x, enum reg_class rclass)
2171 switch (GET_CODE (x))
2173 case CONST_INT:
/* Constants in [0x10000, 0x1000000) whose low 16 bits are non-zero
   need the TPREL_REGS-only load sequence.  */
2174 if (INTVAL (x) >= 0x10000
2175 && INTVAL (x) < 0x01000000
2176 && (INTVAL (x) & 0xffff) != 0
2177 && reg_class_subset_p (TPREL_REGS, rclass))
2178 rclass = TPREL_REGS;
2179 break;
2181 case CONST:
2182 case SYMBOL_REF:
2183 case LABEL_REF:
/* Non-far symbolic constants likewise prefer registers 0-7.  */
2184 if (mep_section_tag (x) != 'f'
2185 && reg_class_subset_p (TPREL_REGS, rclass))
2186 rclass = TPREL_REGS;
2187 break;
2189 default:
2190 break;
2192 return rclass;
2195 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2196 moves, 4 for direct double-register moves, and 1000 for anything
2197 that requires a temporary register or temporary stack slot. */
2200 mep_register_move_cost (machine_mode mode, enum reg_class from, enum reg_class to)
/* CR<->CR with direct copro-copro moves available.  */
2202 if (mep_have_copro_copro_moves_p
2203 && reg_class_subset_p (from, CR_REGS)
2204 && reg_class_subset_p (to, CR_REGS))
2206 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2207 return 4;
2208 return 2;
/* CR<->CR through a core register: twice the cost.  */
2210 if (reg_class_subset_p (from, CR_REGS)
2211 && reg_class_subset_p (to, CR_REGS))
2213 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2214 return 8;
2215 return 4;
/* CR<->GPR direct moves.  */
2217 if (reg_class_subset_p (from, CR_REGS)
2218 || reg_class_subset_p (to, CR_REGS))
2220 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2221 return 4;
2222 return 2;
2224 if (mep_secondary_memory_needed (from, to, mode))
2225 return 1000;
2226 if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
2227 return 1000;
2229 if (GET_MODE_SIZE (mode) > 4)
2230 return 4;
2232 return 2;
2236 /* Functions to save and restore machine-specific function data. */
/* init_machine_status hook: allocate a zeroed, GC-tracked
   machine_function for the current function.  */
2238 static struct machine_function *
2239 mep_init_machine_status (void)
2241 return ggc_cleared_alloc<machine_function> ();
/* ALLOCATE_INITIAL_VALUE hook: give hard register REG a stack save
   slot (allocated lazily, 4 bytes each, addressed negatively off the
   arg pointer) so its entry value can be recovered.  Returns NULL_RTX
   when no slot applies.  */
2244 static rtx
2245 mep_allocate_initial_value (rtx reg)
2247 int rss;
2249 if (GET_CODE (reg) != REG)
2250 return NULL_RTX;
2252 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2253 return NULL_RTX;
2255 /* In interrupt functions, the "initial" values of $gp and $tp are
2256 provided by the prologue. They are not necessarily the same as
2257 the values that the caller was using. */
2258 if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
2259 if (mep_interrupt_p ())
2260 return NULL_RTX;
/* First request for this register: reserve a new 4-byte slot.  */
2262 if (! cfun->machine->reg_save_slot[REGNO(reg)])
2264 cfun->machine->reg_save_size += 4;
2265 cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
2268 rss = cfun->machine->reg_save_slot[REGNO(reg)];
2269 return gen_rtx_MEM (SImode, plus_constant (Pmode, arg_pointer_rtx, -rss));
/* RETURN_ADDR_RTX: only the current frame's return address (COUNT == 0)
   is available, as the entry value of $lp.  */
2273 mep_return_addr_rtx (int count)
2275 if (count != 0)
2276 return const0_rtx;
2278 return get_hard_reg_initial_val (Pmode, LP_REGNO);
/* The entry (pre-prologue) value of the tiny-data pointer $tp.  */
2281 static rtx
2282 mep_tp_rtx (void)
2284 return get_hard_reg_initial_val (Pmode, TP_REGNO);
/* The entry (pre-prologue) value of the global pointer $gp.  */
2287 static rtx
2288 mep_gp_rtx (void)
2290 return get_hard_reg_initial_val (Pmode, GP_REGNO);
/* True if the current function has the "interrupt" attribute.  The
   result is cached in cfun->machine->interrupt_handler
   (0 = unknown, 1 = no, 2 = yes).  */
2293 static bool
2294 mep_interrupt_p (void)
2296 if (cfun->machine->interrupt_handler == 0)
2298 int interrupt_handler
2299 = (lookup_attribute ("interrupt",
2300 DECL_ATTRIBUTES (current_function_decl))
2301 != NULL_TREE);
2302 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2304 return cfun->machine->interrupt_handler == 2;
/* True if the current function has the "disinterrupt" attribute;
   cached the same way in cfun->machine->disable_interrupts.  */
2307 static bool
2308 mep_disinterrupt_p (void)
2310 if (cfun->machine->disable_interrupts == 0)
2312 int disable_interrupts
2313 = (lookup_attribute ("disinterrupt",
2314 DECL_ATTRIBUTES (current_function_decl))
2315 != NULL_TREE);
2316 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2318 return cfun->machine->disable_interrupts == 2;
2322 /* Frame/Epilog/Prolog Related. */
/* True if INSN (or pattern) sets REG.  Unlike reg_set_p, calls are
   not assumed to clobber everything, and a self-copy (reg = same reg)
   does not count as a set.  */
2324 static bool
2325 mep_reg_set_p (rtx reg, rtx insn)
2327 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2328 if (INSN_P (insn))
2330 if (FIND_REG_INC_NOTE (insn, reg))
2331 return true;
2332 insn = PATTERN (insn);
/* A no-op move of a register to itself is not a real set.  */
2335 if (GET_CODE (insn) == SET
2336 && GET_CODE (XEXP (insn, 0)) == REG
2337 && GET_CODE (XEXP (insn, 1)) == REG
2338 && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
2339 return false;
2341 return set_of (reg, insn) != NULL_RTX;
/* Cached save-status values for cfun->machine->reg_saved[].  */
2345 #define MEP_SAVES_UNKNOWN 0
2346 #define MEP_SAVES_YES 1
2347 #define MEP_SAVES_MAYBE 2
2348 #define MEP_SAVES_NO 3
/* True if hard register REGNO is modified anywhere in the current
   function (scanning the whole insn chain), or must conservatively
   be treated as such (live in an interrupt handler, or $lp when
   profiling implicitly clobbers it).  */
2350 static bool
2351 mep_reg_set_in_function (int regno)
2353 rtx reg;
2354 rtx_insn *insn;
2356 if (mep_interrupt_p () && df_regs_ever_live_p(regno))
2357 return true;
/* Profiling code clobbers $lp behind our back.  */
2359 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2360 return true;
2362 push_topmost_sequence ();
2363 insn = get_insns ();
2364 pop_topmost_sequence ();
2366 if (!insn)
2367 return false;
2369 reg = gen_rtx_REG (SImode, regno);
2371 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2372 if (INSN_P (insn) && mep_reg_set_p (reg, insn))
2373 return true;
2374 return false;
/* True if the current function contains a basic asm (ASM_INPUT, i.e.
   an asm with no operands), which could touch any register.  Cached in
   cfun->machine->asms_without_operands (0 = unknown, 1 = no, 2 = yes).  */
2377 static bool
2378 mep_asm_without_operands_p (void)
2380 if (cfun->machine->asms_without_operands == 0)
2382 rtx_insn *insn;
2384 push_topmost_sequence ();
2385 insn = get_insns ();
2386 pop_topmost_sequence ();
2388 cfun->machine->asms_without_operands = 1;
2389 while (insn)
2391 if (INSN_P (insn)
2392 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2394 cfun->machine->asms_without_operands = 2;
2395 break;
2397 insn = NEXT_INSN (insn);
2401 return cfun->machine->asms_without_operands == 2;
2404 /* Interrupt functions save/restore every call-preserved register, and
2405 any call-used register it uses (or all if it calls any function,
2406 since they may get clobbered there too). Here we check to see
2407 which call-used registers need saving. */
/* Extra IVC2 coprocessor control registers that interrupt handlers
   must preserve.  */
2409 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2410 && (r == FIRST_CCR_REGNO + 1 \
2411 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2412 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
/* True if register R must be saved by the prologue of the current
   interrupt handler.  Always false for non-interrupt functions.  */
2414 static bool
2415 mep_interrupt_saved_reg (int r)
2417 if (!mep_interrupt_p ())
2418 return false;
/* The control-register scratch register(s) are always needed.  */
2419 if (r == REGSAVE_CONTROL_TEMP
2420 || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
2421 return true;
/* A basic asm could touch anything that isn't fixed.  */
2422 if (mep_asm_without_operands_p ()
2423 && (!fixed_regs[r]
2424 || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
2425 || IVC2_ISAVED_REG (r)))
2426 return true;
2427 if (!crtl->is_leaf)
2428 /* Function calls mean we need to save $lp. */
2429 if (r == LP_REGNO || IVC2_ISAVED_REG (r))
2430 return true;
2431 if (!crtl->is_leaf || cfun->machine->doloop_tags > 0)
2432 /* The interrupt handler might use these registers for repeat blocks,
2433 or it might call a function that does so. */
2434 if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
2435 return true;
2436 if (crtl->is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
2437 return false;
2438 /* Functions we call might clobber these. */
2439 if (call_used_regs[r] && !fixed_regs[r])
2440 return true;
2441 /* Additional registers that need to be saved for IVC2. */
2442 if (IVC2_ISAVED_REG (r))
2443 return true;
2445 return false;
/* True if the current function must save register R.  The result is
   computed once per register and cached in cfun->machine->reg_saved[]
   until the frame layout is locked.  */
2448 static bool
2449 mep_call_saves_register (int r)
2451 if (! cfun->machine->frame_locked)
2453 int rv = MEP_SAVES_NO;
2455 if (cfun->machine->reg_save_slot[r])
2456 rv = MEP_SAVES_YES;
/* Profiling implicitly clobbers $lp; see mep_epilogue_uses.  */
2457 else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2458 rv = MEP_SAVES_YES;
2459 else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
2460 rv = MEP_SAVES_YES;
2461 else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
2462 rv = MEP_SAVES_YES;
2463 else if (crtl->calls_eh_return && (r == 10 || r == 11))
2464 /* We need these to have stack slots so that they can be set during
2465 unwinding. */
2466 rv = MEP_SAVES_YES;
2467 else if (mep_interrupt_saved_reg (r))
2468 rv = MEP_SAVES_YES;
2469 cfun->machine->reg_saved[r] = rv;
2471 return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
2474 /* Return true if epilogue uses register REGNO. */
2476 bool
2477 mep_epilogue_uses (int regno)
2479 /* Since $lp is a call-saved register, the generic code will normally
2480 mark it used in the epilogue if it needs to be saved and restored.
2481 However, when profiling is enabled, the profiling code will implicitly
2482 clobber $11. This case has to be handled specially both here and in
2483 mep_call_saves_register. */
2484 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2485 return true;
2486 /* Interrupt functions save/restore pretty much everything. */
2487 return (reload_completed && mep_interrupt_saved_reg (regno));
2490 static int
2491 mep_reg_size (int regno)
2493 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2494 return 8;
2495 return 4;
2498 /* Worker function for TARGET_CAN_ELIMINATE. */
2500 bool
2501 mep_can_eliminate (const int from, const int to)
2503 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2504 ? ! frame_pointer_needed
2505 : true);
/* Return the offset between eliminable registers FROM and TO.  As a
   side effect, (re)computes the two filler sizes cached in
   cfun->machine: the register-save area and the whole frame are each
   padded to an 8-byte boundary.  Frame layout, high to low: saved
   regs, regsave filler, locals, frame filler, outgoing args.  */
int
mep_elimination_offset (int from, int to)
{
  int reg_save_size;
  int i;
  int frame_size = get_frame_size () + crtl->outgoing_args_size;
  int total_size;

  /* Before the layout is locked, start from a clean save-decision
     cache so mep_call_saves_register recomputes everything.  */
  if (!cfun->machine->frame_locked)
    memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));

  /* We don't count arg_regs_to_save in the arg pointer offset, because
     gcc thinks the arg pointer has moved along with the saved regs.
     However, we do count it when we adjust $sp in the prologue.  */
  reg_save_size = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register (i))
      reg_save_size += mep_reg_size (i);

  if (reg_save_size % 8)
    cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
  else
    cfun->machine->regsave_filler = 0;

  /* This is what our total stack adjustment looks like.  */
  total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);

  if (total_size % 8)
    cfun->machine->frame_filler = 8 - (total_size % 8);
  else
    cfun->machine->frame_filler = 0;

  if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler;

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return cfun->machine->frame_filler + frame_size;

  if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;

  gcc_unreachable ();
}
2553 static rtx_insn *
2554 F (rtx_insn *x)
2556 RTX_FRAME_RELATED_P (x) = 1;
2557 return x;
/* Since the prologue/epilogue code is generated after optimization,
   we can't rely on gcc to split constants for us.  So, this code
   captures all the ways to add a constant to a register in one logic
   chunk, including optimizing away insns we just don't need.  This
   makes the prolog/epilog code easier to follow.  */
static void
add_constant (int dest, int src, int value, int mark_frame)
{
  rtx_insn *insn;
  int hi, lo;

  /* No-op: adding zero in place.  */
  if (src == dest && value == 0)
    return;

  /* Pure register copy.  */
  if (value == 0)
    {
      insn = emit_move_insn (gen_rtx_REG (SImode, dest),
			     gen_rtx_REG (SImode, src));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* Constant fits in a single add-immediate.  */
  if (value >= -32768 && value <= 32767)
    {
      insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				    gen_rtx_REG (SImode, src),
				    GEN_INT (value)));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* Big constant, need to use a temp register.  We use
     REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
     area is always small enough to directly add to).  */

  hi = trunc_int_for_mode (value & 0xffff0000, SImode);
  lo = value & 0xffff;

  /* Build the constant in the temp: load the high half, OR in the
     low half (when non-zero), then add the temp to SRC.  */
  insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			 GEN_INT (hi));

  if (lo)
    {
      insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    GEN_INT (lo)));
    }

  insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				gen_rtx_REG (SImode, src),
				gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
  if (mark_frame)
    {
      RTX_FRAME_RELATED_P(insn) = 1;
      /* Describe the net effect of the three insns to the unwinder as
	 a single DEST = DEST + VALUE.  NOTE(review): the addend base
	 is DEST, not SRC -- correct for the $sp-adjust callers where
	 DEST == SRC; confirm for any caller with DEST != SRC.  */
      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		    gen_rtx_SET (gen_rtx_REG (SImode, dest),
				 gen_rtx_PLUS (SImode,
					       gen_rtx_REG (SImode, dest),
					       GEN_INT (value))));
    }
}
2624 /* Move SRC to DEST. Mark the move as being potentially dead if
2625 MAYBE_DEAD_P. */
2627 static rtx_insn *
2628 maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2630 rtx_insn *insn = emit_move_insn (dest, src);
2631 #if 0
2632 if (maybe_dead_p)
2633 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2634 #endif
2635 return insn;
2638 /* Used for interrupt functions, which can't assume that $tp and $gp
2639 contain the correct pointers. */
2641 static void
2642 mep_reload_pointer (int regno, const char *symbol)
2644 rtx reg, sym;
2646 if (!df_regs_ever_live_p(regno) && crtl->is_leaf)
2647 return;
2649 reg = gen_rtx_REG (SImode, regno);
2650 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2651 emit_insn (gen_movsi_topsym_s (reg, sym));
2652 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
/* Assign save slots for any register not already saved.  DImode
   registers go at the end of the reg save area; the rest go at the
   beginning.  This is for alignment purposes.  Returns true if a frame
   is really needed.  */
static bool
mep_assign_save_slots (int reg_save_size)
{
  bool really_need_stack_frame = false;
  int di_ofs = 0;
  int i;

  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
	int regsize = mep_reg_size (i);

	/* $tp/$gp/$lp force a real frame only if actually modified;
	   any other saved register always does.  */
	if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
	    || mep_reg_set_in_function (i))
	  really_need_stack_frame = true;

	if (cfun->machine->reg_save_slot[i])
	  continue;

	if (regsize < 8)
	  {
	    /* 4-byte registers pack upward from the bottom.  */
	    cfun->machine->reg_save_size += regsize;
	    cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
	  }
	else
	  {
	    /* 8-byte registers fill downward from the top of the
	       area, keeping them 8-byte aligned.  */
	    cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
	    di_ofs += 8;
	  }
      }
  /* From here on the layout must not change.  */
  cfun->machine->frame_locked = 1;
  return really_need_stack_frame;
}
/* Expand the prologue: allocate the frame, store every register that
   mep_call_saves_register selects, set up the frame pointer if
   needed, and reload $gp/$tp in interrupt handlers.  */
void
mep_expand_prologue (void)
{
  int i, rss, sp_offset = 0;
  int reg_save_size;
  int frame_size;
  int really_need_stack_frame;

  /* We must not allow register renaming in interrupt functions,
     because that invalidates the correctness of the set of call-used
     registers we're going to save/restore.  */
  mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_disable_int ());

  cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  really_need_stack_frame = frame_size;

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  /* Do the whole $sp adjustment up front when that keeps the save
     offsets small (the 128 limit presumably matches the short
     displacement range -- NOTE(review): confirm against the ISA).  */
  sp_offset = reg_save_size;
  if (sp_offset + frame_size < 128)
    sp_offset += frame_size ;

  add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);

  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
	rtx mem;
	bool maybe_dead_p;
	machine_mode rmode;

	rss = cfun->machine->reg_save_slot[i];

	/* $tp/$gp/$lp need not be stored by non-interrupt functions
	   that never modify them.  */
	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && (!mep_reg_set_in_function (i)
		&& !mep_interrupt_p ()))
	  continue;

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* If there is a pseudo associated with this register's initial value,
	   reload might have already spilt it to the stack slot suggested by
	   ALLOCATE_INITIAL_VALUE.  The moves emitted here can then be safely
	   deleted as dead.  */
	mem = gen_rtx_MEM (rmode,
			   plus_constant (Pmode, stack_pointer_rtx,
					  sp_offset - rss));
	maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  /* Directly storable registers go straight to memory.  */
	  F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
	else if (rmode == DImode)
	  {
	    /* 64-bit control registers are staged through the core
	       register pair REGSAVE_CONTROL_TEMP/+1 one word at a
	       time, stored in memory (endian-dependent) order.  */
	    rtx_insn *insn;
	    int be = TARGET_BIG_ENDIAN ? 4 : 0;

	    mem = gen_rtx_MEM (SImode,
			       plus_constant (Pmode, stack_pointer_rtx,
					      sp_offset - rss + be));

	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (SImode, i),
			     maybe_dead_p);
	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
			     gen_rtx_ZERO_EXTRACT (SImode,
						   gen_rtx_REG (DImode, i),
						   GEN_INT (32),
						   GEN_INT (32)),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    /* The unwinder is told about the full DImode store, not
	       the word-sized pieces.  */
	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	    mem = gen_rtx_MEM (SImode,
			       plus_constant (Pmode, stack_pointer_rtx,
					      sp_offset - rss + (4-be)));
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
				    maybe_dead_p);
	  }
	else
	  {
	    /* Other control registers are staged through a single
	       temporary core register.  */
	    rtx_insn *insn;
	    maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (rmode, i),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	  }
      }

  if (frame_pointer_needed)
    {
      /* We've already adjusted down by sp_offset.  Total $sp change
	 is reg_save_size + frame_size.  We want a net change here of
	 just reg_save_size.  */
      add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
    }

  /* Apply whatever part of the total adjustment was not done up
     front (no-op when sp_offset already covers everything).  */
  add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);

  if (mep_interrupt_p ())
    {
      /* Interrupt handlers can't trust the interrupted code's $gp and
	 $tp; reload them from the linker-provided symbols.  */
      mep_reload_pointer(GP_REGNO, "__sdabase");
      mep_reload_pointer(TP_REGNO, "__tpbase");
    }
}
/* Emit a human-readable description of the frame layout as assembler
   comments at the start of the function, and choose the printed name
   of the $8/$fp register.  HWI_LOCAL is the size of local variables.  */
static void
mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
{
  int local = hwi_local;
  int frame_size = local + crtl->outgoing_args_size;
  int reg_save_size;
  int ffill;
  int i, sp, skip;
  int sp_offset;
  int slot_map[FIRST_PSEUDO_REGISTER], si, sj;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  sp_offset = reg_save_size + frame_size;

  ffill = cfun->machine->frame_filler;

  if (cfun->machine->mep_frame_pointer_needed)
    reg_names[FP_REGNO] = "$fp";
  else
    reg_names[FP_REGNO] = "$8";

  if (sp_offset == 0)
    return;

  /* Without debug info, print only a one-line summary.  */
  if (debug_info_level == DINFO_LEVEL_NONE)
    {
      fprintf (file, "\t# frame: %d", sp_offset);
      if (reg_save_size)
	fprintf (file, " %d regs", reg_save_size);
      if (local)
	fprintf (file, " %d locals", local);
      if (crtl->outgoing_args_size)
	fprintf (file, " %d args", crtl->outgoing_args_size);
      fprintf (file, "\n");
      return;
    }

  fprintf (file, "\t#\n");
  fprintf (file, "\t# Initial Frame Information:\n");
  if (sp_offset || !frame_pointer_needed)
    fprintf (file, "\t# Entry ---------- 0\n");

  /* Sort registers by save slots, so they're printed in the order
     they appear in memory, not the order they're saved in.  */
  for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
    slot_map[si] = si;
  /* Simple O(n^2) selection sort -- FIRST_PSEUDO_REGISTER is small
     and this runs once per function.  */
  for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
    for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
      if (cfun->machine->reg_save_slot[slot_map[si]]
	  > cfun->machine->reg_save_slot[slot_map[sj]])
	{
	  int t = slot_map[si];
	  slot_map[si] = slot_map[sj];
	  slot_map[sj] = t;
	}

  sp = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int rsize;
      int r = slot_map[i];
      int rss = cfun->machine->reg_save_slot[r];

      if (!mep_call_saves_register (r))
	continue;

      /* Skip $tp/$gp/$lp entries that the prologue also skipped.  */
      if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
	  && (!mep_reg_set_in_function (r)
	      && !mep_interrupt_p ()))
	continue;

      rsize = mep_reg_size(r);
      skip = rss - (sp+rsize);
      if (skip)
	fprintf (file, "\t# %3d bytes for alignment\n", skip);
      fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
	       rsize, reg_names[r], sp_offset - rss);
      sp = rss;
    }

  skip = reg_save_size - sp;
  if (skip)
    fprintf (file, "\t# %3d bytes for alignment\n", skip);

  if (frame_pointer_needed)
    fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
  if (local)
    fprintf (file, "\t# %3d bytes for local vars\n", local);
  if (ffill)
    fprintf (file, "\t# %3d bytes for alignment\n", ffill);
  if (crtl->outgoing_args_size)
    fprintf (file, "\t# %3d bytes for outgoing args\n",
	     crtl->outgoing_args_size);
  fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
  fprintf (file, "\t#\n");
}
/* Set while expanding an eh_return epilogue, which must leave $lp
   untouched, and while expanding the epilogue before a sibcall, which
   must not emit a return insn.  */
static int mep_prevent_lp_restore = 0;
static int mep_sibcall_epilogue = 0;
/* Expand the epilogue: restore the saved registers, deallocate the
   frame, and emit the appropriate return (or nothing, for sibcalls).
   Behavior is modulated by mep_prevent_lp_restore and
   mep_sibcall_epilogue above.  */
void
mep_expand_epilogue (void)
{
  int i, sp_offset = 0;
  int reg_save_size = 0;
  int frame_size;
  int lp_temp = LP_REGNO, lp_slot = -1;
  int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
  int interrupt_handler = mep_interrupt_p ();

  if (profile_arc_flag == 2)
    emit_insn (gen_mep_bb_trace_ret ());

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  if (frame_pointer_needed)
    {
      /* If we have a frame pointer, we won't have a reliable stack
	 pointer (alloca, you know), so rebase SP from FP */
      emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
		      gen_rtx_REG (SImode, FP_REGNO));
      sp_offset = reg_save_size;
    }
  else
    {
      /* SP is right under our local variable space.  Adjust it if
	 needed.  */
      sp_offset = reg_save_size + frame_size;
      if (sp_offset >= 128)
	{
	  /* Pop the locals first so restore offsets stay small.  */
	  add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
	  sp_offset -= frame_size;
	}
    }

  /* This is backwards so that we restore the control and coprocessor
     registers before the temporary registers we use to restore
     them.  */
  for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
    if (mep_call_saves_register (i))
      {
	machine_mode rmode;
	int rss = cfun->machine->reg_save_slot[i];

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* Skip $tp/$gp/$lp entries the prologue never stored.  */
	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && !(mep_reg_set_in_function (i) || interrupt_handler))
	  continue;
	if (mep_prevent_lp_restore && i == LP_REGNO)
	  continue;
	/* $10/$11 carry the EH return values (see
	   mep_call_saves_register); don't clobber them in a normal
	   epilogue of a function that also has an EH path.  */
	if (!mep_prevent_lp_restore
	    && !interrupt_handler
	    && (i == 10 || i == 11))
	  continue;

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  emit_move_insn (gen_rtx_REG (rmode, i),
			  gen_rtx_MEM (rmode,
				       plus_constant (Pmode, stack_pointer_rtx,
						      sp_offset - rss)));
	else
	  {
	    if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
	      /* Defer this one so we can jump indirect rather than
		 copying the RA to $lp and "ret".  EH epilogues
		 automatically skip this anyway.  */
	      lp_slot = sp_offset-rss;
	    else
	      {
		/* Control registers load via a core temporary.  */
		emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				gen_rtx_MEM (rmode,
					     plus_constant (Pmode,
							    stack_pointer_rtx,
							    sp_offset-rss)));
		emit_move_insn (gen_rtx_REG (rmode, i),
				gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
	      }
	  }
      }
  if (lp_slot != -1)
    {
      /* Restore this one last so we know it will be in the temp
	 register when we return by jumping indirectly via the temp.  */
      emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
		      gen_rtx_MEM (SImode,
				   plus_constant (Pmode, stack_pointer_rtx,
						  lp_slot)));
      lp_temp = REGSAVE_CONTROL_TEMP;
    }

  /* Pop whatever part of the frame is still allocated.  */
  add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);

  if (crtl->calls_eh_return && mep_prevent_lp_restore)
    /* Apply the handler-supplied stack adjustment (see
       mep_emit_eh_epilogue).  */
    emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
			   gen_rtx_REG (SImode, SP_REGNO),
			   cfun->machine->eh_stack_adjust));

  if (mep_sibcall_epilogue)
    return;

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_enable_int ());

  if (mep_prevent_lp_restore)
    {
      emit_jump_insn (gen_eh_return_internal ());
      emit_barrier ();
    }
  else if (interrupt_handler)
    emit_jump_insn (gen_mep_reti ());
  else
    emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
}
3044 void
3045 mep_expand_eh_return (rtx *operands)
3047 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3049 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3050 emit_move_insn (ra, operands[0]);
3051 operands[0] = ra;
3054 emit_insn (gen_eh_epilogue (operands[0]));
/* Worker for the eh_epilogue expander: emit an epilogue that leaves
   $lp alone (it already holds the EH return address) and lets the
   epilogue add the stack adjustment held in $0.  */
void
mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
{
  cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
  mep_prevent_lp_restore = 1;
  mep_expand_epilogue ();
  mep_prevent_lp_restore = 0;
}
/* Emit the epilogue variant used before a sibling call: restore
   registers and deallocate the frame, but emit no return insn.  */
void
mep_expand_sibcall_epilogue (void)
{
  mep_sibcall_epilogue = 1;
  mep_expand_epilogue ();
  mep_sibcall_epilogue = 0;
}
3074 static bool
3075 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3077 if (decl == NULL)
3078 return false;
3080 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3081 return false;
3083 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3084 if (mep_interrupt_p () || mep_disinterrupt_p ())
3085 return false;
3087 return true;
/* Return the register ($10) carrying the EH stack adjustment; $10 is
   also given a save slot for EH returns in mep_call_saves_register.  */
rtx
mep_return_stackadj_rtx (void)
{
  return gen_rtx_REG (SImode, 10);
}
/* Return the rtx for the register holding the return address, $lp.  */
rtx
mep_return_handler_rtx (void)
{
  return gen_rtx_REG (SImode, LP_REGNO);
}
/* Emit the profiling stub.  $0 and $lp are spilled around the bsr
   because $0 is used as scratch and bsr clobbers $lp.  */
void
mep_function_profiler (FILE *file)
{
  /* Always right at the beginning of the function.  */
  fprintf (file, "\t# mep function profiler\n");
  fprintf (file, "\tadd\t$sp, -8\n");
  fprintf (file, "\tsw\t$0, ($sp)\n");
  fprintf (file, "\tldc\t$0, $lp\n");
  fprintf (file, "\tsw\t$0, 4($sp)\n");
  fprintf (file, "\tbsr\t__mep_mcount\n");
  fprintf (file, "\tlw\t$0, 4($sp)\n");
  fprintf (file, "\tstc\t$0, $lp\n");
  fprintf (file, "\tlw\t$0, ($sp)\n");
  fprintf (file, "\tadd\t$sp, 8\n\n");
}
/* Emit the basic-block-profiling exit stub, preserving $0 and $lp
   exactly like mep_function_profiler.  Returns an empty insn
   template: all output happens here.  */
const char *
mep_emit_bb_trace_ret (void)
{
  fprintf (asm_out_file, "\t# end of block profiling\n");
  fprintf (asm_out_file, "\tadd\t$sp, -8\n");
  fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
  fprintf (asm_out_file, "\tldc\t$0, $lp\n");
  fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
  fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
  fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
  fprintf (asm_out_file, "\tstc\t$0, $lp\n");
  fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
  fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
  return "";
}
3134 #undef SAVE
3135 #undef RESTORE
3137 /* Operand Printing. */
3139 void
3140 mep_print_operand_address (FILE *stream, rtx address)
3142 if (GET_CODE (address) == MEM)
3143 address = XEXP (address, 0);
3144 else
3145 /* cf: gcc.dg/asm-4.c. */
3146 gcc_assert (GET_CODE (address) == REG);
3148 mep_print_operand (stream, address, 0);
/* Table driving mep_print_operand.  Each operand rtx is first turned
   by encode_pattern into a string of one-character codes (with the
   corresponding sub-rtxes collected in patternr[]).  An entry matches
   when both its operand letter CODE and its PATTERN match; FORMAT is
   then emitted literally except that a digit N prints patternr[N] and
   a backslash escapes the following character.  NOTE(review): the
   exact meaning of the pattern letters is defined by encode_pattern,
   which is outside this chunk.  */
static struct
{
  char code;
  const char *pattern;
  const char *format;
}
const conversions[] =
{
  { 0, "r", "0" },
  { 0, "m+ri", "3(2)" },
  { 0, "mr", "(1)" },
  { 0, "ms", "(1)" },
  { 0, "ml", "(1)" },
  { 0, "mLrs", "%lo(3)(2)" },
  { 0, "mLr+si", "%lo(4+5)(2)" },
  { 0, "m+ru2s", "%tpoff(5)(2)" },
  { 0, "m+ru3s", "%sdaoff(5)(2)" },
  { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
  { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
  { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
  { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
  { 0, "mi", "(1)" },
  { 0, "m+si", "(2+3)" },
  { 0, "m+li", "(2+3)" },
  { 0, "i", "0" },
  { 0, "s", "0" },
  { 0, "+si", "1+2" },
  { 0, "+u2si", "%tpoff(3+4)" },
  { 0, "+u3si", "%sdaoff(3+4)" },
  { 0, "l", "0" },
  { 'b', "i", "0" },
  { 'B', "i", "0" },
  { 'U', "i", "0" },
  { 'h', "i", "0" },
  { 'h', "Hs", "%hi(1)" },
  { 'I', "i", "0" },
  { 'I', "u2s", "%tpoff(2)" },
  { 'I', "u3s", "%sdaoff(2)" },
  { 'I', "+u2si", "%tpoff(3+4)" },
  { 'I', "+u3si", "%sdaoff(3+4)" },
  { 'J', "i", "0" },
  { 'P', "mr", "(1\\+),\\0" },
  { 'x', "i", "0" },
  { 0, 0, 0 }
};
3197 static int
3198 unique_bit_in (HOST_WIDE_INT i)
3200 switch (i & 0xff)
3202 case 0x01: case 0xfe: return 0;
3203 case 0x02: case 0xfd: return 1;
3204 case 0x04: case 0xfb: return 2;
3205 case 0x08: case 0xf7: return 3;
3206 case 0x10: case 0x7f: return 4;
3207 case 0x20: case 0xbf: return 5;
3208 case 0x40: case 0xdf: return 6;
3209 case 0x80: case 0xef: return 7;
3210 default:
3211 gcc_unreachable ();
3215 static int
3216 bit_size_for_clip (HOST_WIDE_INT i)
3218 int rv;
3220 for (rv = 0; rv < 31; rv ++)
3221 if (((HOST_WIDE_INT) 1 << rv) > i)
3222 return rv + 1;
3223 gcc_unreachable ();
/* Print an operand to an assembler instruction.  CODE is the operand
   modifier letter ('%<', '%L', '%M' are handled specially; everything
   else is table-driven via conversions[] above).  */

void
mep_print_operand (FILE *file, rtx x, int code)
{
  int i, j;
  const char *real_name;

  if (code == '<')
    {
      /* Print a mnemonic to do CR <- CR moves.  Find out which intrinsic
	 we're using, then skip over the "mep_" part of its name.  */
      const struct cgen_insn *insn;

      if (mep_get_move_insn (mep_cmov, &insn))
	fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
      else
	mep_intrinsic_unavailable (mep_cmov);
      return;
    }
  if (code == 'L')
    {
      /* Print the bit-operation mnemonic suffix for the rtx code.  */
      switch (GET_CODE (x))
	{
	case AND:
	  fputs ("clr", file);
	  return;
	case IOR:
	  fputs ("set", file);
	  return;
	case XOR:
	  fputs ("not", file);
	  return;
	default:
	  output_operand_lossage ("invalid %%L code");
	}
    }
  if (code == 'M')
    {
      /* Print the second operand of a CR <- CR move.  If we're using
	 a two-operand instruction (i.e., a real cmov), then just print
	 the operand normally.  If we're using a "reg, reg, immediate"
	 instruction such as caddi3, print the operand followed by a
	 zero field.  If we're using a three-register instruction,
	 print the operand twice.  */
      const struct cgen_insn *insn;

      mep_print_operand (file, x, 0);
      if (mep_get_move_insn (mep_cmov, &insn)
	  && insn_data[insn->icode].n_operands == 3)
	{
	  fputs (", ", file);
	  if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
	    mep_print_operand (file, x, 0);
	  else
	    mep_print_operand (file, const0_rtx, 0);
	}
      return;
    }

  /* Table-driven case: encode X, find the first matching entry, and
     interpret its format string (digits print sub-rtxes).  */
  encode_pattern (x);
  for (i = 0; conversions[i].pattern; i++)
    if (conversions[i].code == code
	&& strcmp(conversions[i].pattern, pattern) == 0)
      {
	for (j = 0; conversions[i].format[j]; j++)
	  if (conversions[i].format[j] == '\\')
	    {
	      /* Escaped character: emit the next byte literally.  */
	      fputc (conversions[i].format[j+1], file);
	      j++;
	    }
	  else if (ISDIGIT(conversions[i].format[j]))
	    {
	      rtx r = patternr[conversions[i].format[j] - '0'];
	      switch (GET_CODE (r))
		{
		case REG:
		  fprintf (file, "%s", reg_names [REGNO (r)]);
		  break;
		case CONST_INT:
		  /* Constants print differently per modifier.  */
		  switch (code)
		    {
		    case 'b':
		      fprintf (file, "%d", unique_bit_in (INTVAL (r)));
		      break;
		    case 'B':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
		      break;
		    case 'h':
		      fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
		      break;
		    case 'U':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
		      break;
		    case 'J':
		      fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
		      break;
		    case 'x':
		      /* Round values with only high bits set print in
			 hex, everything else in decimal.  */
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && !(INTVAL (r) & 0xff))
			fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    case 'I':
		      /* Large standalone constants get a hex comment
			 appended for readability.  */
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && conversions[i].format[j+1] == 0)
			{
			  fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
			  fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
			}
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    default:
		      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    }
		  break;
		case CONST_DOUBLE:
		  fprintf(file, "[const_double 0x%lx]",
			  (unsigned long) CONST_DOUBLE_HIGH(r));
		  break;
		case SYMBOL_REF:
		  real_name = targetm.strip_name_encoding (XSTR (r, 0));
		  assemble_name (file, real_name);
		  break;
		case LABEL_REF:
		  output_asm_label (r);
		  break;
		default:
		  fprintf (stderr, "don't know how to print this operand:");
		  debug_rtx (r);
		  gcc_unreachable ();
		}
	    }
	  else
	    {
	      /* Literal format character; suppress a '+' before a
		 negative constant, which prints its own sign.  */
	      if (conversions[i].format[j] == '+'
		  && (!code || code == 'I')
		  && ISDIGIT (conversions[i].format[j+1])
		  && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
		  && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
		continue;
	      fputc(conversions[i].format[j], file);
	    }
	break;
      }
  if (!conversions[i].pattern)
    {
      error ("unconvertible operand %c %qs", code?code:'-', pattern);
      debug_rtx(x);
    }

  return;
}
/* Emit a '+' bundling prefix before VLIW jumps whose recorded slot
   information cannot be trusted.  */
void
mep_final_prescan_insn (rtx_insn *insn, rtx *operands ATTRIBUTE_UNUSED,
			int noperands ATTRIBUTE_UNUSED)
{
  /* Despite the fact that MeP is perfectly capable of branching and
     doing something else in the same bundle, gcc does jump
     optimization *after* scheduling, so we cannot trust the bundling
     flags on jump instructions.  */
  if (GET_MODE (insn) == BImode
      && get_attr_slots (insn) != SLOTS_CORE)
    fputc ('+', asm_out_file);
}
3396 /* Function args in registers. */
3398 static void
3399 mep_setup_incoming_varargs (cumulative_args_t cum,
3400 machine_mode mode ATTRIBUTE_UNUSED,
3401 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3402 int second_time ATTRIBUTE_UNUSED)
3404 int nsave = 4 - (get_cumulative_args (cum)->nregs + 1);
3406 if (nsave > 0)
3407 cfun->machine->arg_regs_to_save = nsave;
3408 *pretend_size = nsave * 4;
3411 static int
3412 bytesize (const_tree type, machine_mode mode)
3414 if (mode == BLKmode)
3415 return int_size_in_bytes (type);
3416 return GET_MODE_SIZE (mode);
/* Expand __builtin_saveregs: dump the unnamed-argument registers to a
   stack buffer and return its address.  For IVC2 the buffer has an
   extra 8-byte-aligned section that also receives copies from the
   coprocessor argument registers.  */
static rtx
mep_expand_builtin_saveregs (void)
{
  int bufsize, i, ns;
  rtx regbuf;

  ns = cfun->machine->arg_regs_to_save;
  if (TARGET_IVC2)
    {
      /* Core words first (rounded up to pairs), then NS 8-byte
	 coprocessor slots.  */
      bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
      regbuf = assign_stack_local (SImode, bufsize, 64);
    }
  else
    {
      bufsize = ns * 4;
      regbuf = assign_stack_local (SImode, bufsize, 32);
    }

  move_block_from_reg (5-ns, regbuf, ns);

  if (TARGET_IVC2)
    {
      rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
      int ofs = 8 * ((ns+1)/2);

      for (i=0; i<ns; i++)
	{
	  /* NOTE(review): 49 appears to be the hard register number
	     of the first coprocessor argument register -- confirm
	     against the register layout in mep.h.  */
	  int rn = (4-ns) + i + 49;
	  rtx ptr;

	  ptr = offset_address (tmp, GEN_INT (ofs), 2);
	  emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
	  ofs += 8;
	}
    }
  return XEXP (regbuf, 0);
}
/* Implement TARGET_BUILD_BUILTIN_VA_LIST.  The va_list is a record of
   four pointers: the next general-register slot, the limit of the
   register save area, the next coprocessor slot (IVC2), and the next
   stack argument.  */
static tree
mep_build_builtin_va_list (void)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree record;


  record = (*lang_hooks.types.make_type) (RECORD_TYPE);

  f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
			  get_identifier ("__va_next_gp"), ptr_type_node);
  f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
				get_identifier ("__va_next_gp_limit"),
				ptr_type_node);
  f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
			   ptr_type_node);
  f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
			     ptr_type_node);

  DECL_FIELD_CONTEXT (f_next_gp) = record;
  DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
  DECL_FIELD_CONTEXT (f_next_cop) = record;
  DECL_FIELD_CONTEXT (f_next_stack) = record;

  /* Chain the fields in declaration order.  */
  TYPE_FIELDS (record) = f_next_gp;
  DECL_CHAIN (f_next_gp) = f_next_gp_limit;
  DECL_CHAIN (f_next_gp_limit) = f_next_cop;
  DECL_CHAIN (f_next_cop) = f_next_stack;

  layout_type (record);

  return record;
}
/* Implement va_start: fill in all four va_list fields.  NEXTARG is
   the address of the first stack-passed argument.  */
static void
mep_expand_va_start (tree valist, rtx nextarg)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree t, u;
  int ns;

  ns = cfun->machine->arg_regs_to_save;

  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* va_list.next_gp = expand_builtin_saveregs (); */
  u = make_tree (sizetype, expand_builtin_saveregs ());
  u = fold_convert (ptr_type_node, u);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
  u = fold_build_pointer_plus_hwi (u, 4 * ns);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
  u = fold_build_pointer_plus_hwi (u, 8 * ((ns+1)/2));
  t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_stack = nextarg; */
  u = make_tree (ptr_type_node, nextarg);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Values wider than one
   register (8 bytes for IVC2 vectors) are passed by reference.  Small
   values come from the general-register save area -- or the
   coprocessor area for IVC2 vectors -- until it is exhausted, then
   from the stack.  */
static tree
mep_gimplify_va_arg_expr (tree valist, tree type,
			  gimple_seq *pre_p,
			  gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size, rsize;
  bool by_reference, ivc2_vec;
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree label_sover, label_selse;
  tree tmp, res_addr;

  ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);

  size = int_size_in_bytes (type);
  by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);

  if (by_reference)
    {
      /* Fetch a pointer instead; the caller passed the address.  */
      type = build_pointer_type (type);
      size = 4;
    }
  /* Round the stack increment up to a word multiple.  */
  rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;

  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* if f_next_gp < f_next_gp_limit
       IF (VECTOR_P && IVC2)
         val = *f_next_cop;
       ELSE
         val = *f_next_gp;
       f_next_gp += 4;
       f_next_cop += 8;
     else
       label_selse:
       val = *f_next_stack;
       f_next_stack += rsize;
     label_sover:
  */

  label_sover = create_artificial_label (UNKNOWN_LOCATION);
  label_selse = create_artificial_label (UNKNOWN_LOCATION);
  res_addr = create_tmp_var (ptr_type_node);

  /* Register area exhausted?  Branch to the stack case.  */
  tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
		unshare_expr (next_gp_limit));
  tmp = build3 (COND_EXPR, void_type_node, tmp,
		build1 (GOTO_EXPR, void_type_node,
			unshare_expr (label_selse)),
		NULL_TREE);
  gimplify_and_add (tmp, pre_p);

  if (ivc2_vec)
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
      gimplify_and_add (tmp, pre_p);
    }
  else
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
      gimplify_and_add (tmp, pre_p);
    }

  /* Both cursors advance together so they stay in sync.  */
  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_gp), 4);
  gimplify_assign (unshare_expr (next_gp), tmp, pre_p);

  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_cop), 8);
  gimplify_assign (unshare_expr (next_cop), tmp, pre_p);

  tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
  gimplify_and_add (tmp, pre_p);

  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_stack), rsize);
  gimplify_assign (unshare_expr (next_stack), tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  res_addr = fold_convert (build_pointer_type (type), res_addr);

  if (by_reference)
    res_addr = build_va_arg_indirect_ref (res_addr);

  return build_va_arg_indirect_ref (res_addr);
}
3649 void
3650 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3651 rtx libname ATTRIBUTE_UNUSED,
3652 tree fndecl ATTRIBUTE_UNUSED)
3654 pcum->nregs = 0;
3656 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3657 pcum->vliw = 1;
3658 else
3659 pcum->vliw = 0;
3662 /* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3663 larger than 4 bytes are passed indirectly. Return value in 0,
3664 unless bigger than 4 bytes, then the caller passes a pointer as the
3665 first arg. For varargs, we copy $1..$4 to the stack. */
3667 static rtx
3668 mep_function_arg (cumulative_args_t cum_v, machine_mode mode,
3669 const_tree type ATTRIBUTE_UNUSED,
3670 bool named ATTRIBUTE_UNUSED)
3672 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3674 /* VOIDmode is a signal for the backend to pass data to the call
3675 expander via the second operand to the call pattern. We use
3676 this to determine whether to use "jsr" or "jsrv". */
3677 if (mode == VOIDmode)
3678 return GEN_INT (cum->vliw);
3680 /* If we havn't run out of argument registers, return the next. */
3681 if (cum->nregs < 4)
3683 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3684 return gen_rtx_REG (mode, cum->nregs + 49);
3685 else
3686 return gen_rtx_REG (mode, cum->nregs + 1);
3689 /* Otherwise the argument goes on the stack. */
3690 return NULL_RTX;
3693 static bool
3694 mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
3695 machine_mode mode,
3696 const_tree type,
3697 bool named ATTRIBUTE_UNUSED)
3699 int size = bytesize (type, mode);
3701 /* This is non-obvious, but yes, large values passed after we've run
3702 out of registers are *still* passed by reference - we put the
3703 address of the parameter on the stack, as well as putting the
3704 parameter itself elsewhere on the stack. */
3706 if (size <= 0 || size > 8)
3707 return true;
3708 if (size <= 4)
3709 return false;
3710 if (TARGET_IVC2 && get_cumulative_args (cum)->nregs < 4
3711 && type != NULL_TREE && VECTOR_TYPE_P (type))
3712 return false;
3713 return true;
3716 static void
3717 mep_function_arg_advance (cumulative_args_t pcum,
3718 machine_mode mode ATTRIBUTE_UNUSED,
3719 const_tree type ATTRIBUTE_UNUSED,
3720 bool named ATTRIBUTE_UNUSED)
3722 get_cumulative_args (pcum)->nregs += 1;
3725 bool
3726 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3728 int size = bytesize (type, BLKmode);
3729 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3730 return size > 0 && size <= 8 ? 0 : 1;
3731 return size > 0 && size <= 4 ? 0 : 1;
/* Implement TARGET_NARROW_VOLATILE_BITFIELD: volatile bitfields are
   always accessed in their declared (narrow) width.  The original
   body contained an unreachable "return false;" after the return
   statement; that dead code has been removed.  */
static bool
mep_narrow_volatile_bitfield (void)
{
  return true;
}
3741 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3744 mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
3746 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3747 return gen_rtx_REG (TYPE_MODE (type), 48);
3748 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
/* Implement LIBCALL_VALUE, using the same rules as mep_function_value.  */

rtx
mep_libcall_value (machine_mode mode)
{
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
/* Handle pipeline hazards.  */

/* Opcode classes that participate in hazards, with their printable
   mnemonics (indexed by op_num).  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* Classification of the previously emitted opcode; op_none (0)
   initially.  Read and updated by mep_asm_output_opcode.  */
static int prev_opcode = 0;
3766 /* This isn't as optimal as it could be, because we don't know what
3767 control register the STC opcode is storing in. We only need to add
3768 the nop if it's the relevant register, but we add it for irrelevant
3769 registers also. */
3771 void
3772 mep_asm_output_opcode (FILE *file, const char *ptr)
3774 int this_opcode = op_none;
3775 const char *hazard = 0;
3777 switch (*ptr)
3779 case 'f':
3780 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3781 this_opcode = op_fsft;
3782 break;
3783 case 'r':
3784 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3785 this_opcode = op_ret;
3786 break;
3787 case 's':
3788 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3789 this_opcode = op_stc;
3790 break;
3793 if (prev_opcode == op_stc && this_opcode == op_fsft)
3794 hazard = "nop";
3795 if (prev_opcode == op_stc && this_opcode == op_ret)
3796 hazard = "nop";
3798 if (hazard)
3799 fprintf(file, "%s\t# %s-%s hazard\n\t",
3800 hazard, opnames[prev_opcode], opnames[this_opcode]);
3802 prev_opcode = this_opcode;
3805 /* Handle attributes. */
/* Handle attributes.  */

/* Attribute handler for "based" and "tiny": valid on variables and
   (pointer/typedef) types; rejected on auto variables and on
   pointed-to types.  */
static tree
mep_validate_based_tiny (tree *node, tree name, tree args,
			 int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (0, "address region attributes not allowed with auto storage class");
	  *no_add = true;
	}
      /* Ignore storage attribute of pointed to variable: char __far * x;  */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
	{
	  warning (0, "address region attributes on pointed-to types ignored");
	  *no_add = true;
	}
    }

  return NULL_TREE;
}
3836 static int
3837 mep_multiple_address_regions (tree list, bool check_section_attr)
3839 tree a;
3840 int count_sections = 0;
3841 int section_attr_count = 0;
3843 for (a = list; a; a = TREE_CHAIN (a))
3845 if (is_attribute_p ("based", TREE_PURPOSE (a))
3846 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3847 || is_attribute_p ("near", TREE_PURPOSE (a))
3848 || is_attribute_p ("far", TREE_PURPOSE (a))
3849 || is_attribute_p ("io", TREE_PURPOSE (a)))
3850 count_sections ++;
3851 if (check_section_attr)
3852 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3855 if (check_section_attr)
3856 return section_attr_count;
3857 else
3858 return count_sections;
/* The attribute list relevant to DECL: for a type, its own
   attributes; otherwise the decl's attributes, falling back to the
   attributes of the decl's type.  */
#define MEP_ATTRIBUTES(decl) \
  (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
		: DECL_ATTRIBUTES (decl) \
		  ? (DECL_ATTRIBUTES (decl)) \
		  : TYPE_ATTRIBUTES (TREE_TYPE (decl))
/* Attribute handler for "near" and "far": like
   mep_validate_based_tiny but also valid on functions, and rejects a
   second address-region attribute on the same declaration.  */
static tree
mep_validate_near_far (tree *node, tree name, tree args,
		       int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables and functions",
	       name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (0, "address region attributes not allowed with auto storage class");
	  *no_add = true;
	}
      /* Ignore storage attribute of pointed to variable: char __far * x;  */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
	{
	  warning (0, "address region attributes on pointed-to types ignored");
	  *no_add = true;
	}
    }
  else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
    {
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
      DECL_ATTRIBUTES (*node) = NULL_TREE;
    }
  return NULL_TREE;
}
3904 static tree
3905 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3906 int flags ATTRIBUTE_UNUSED, bool *no_add)
3908 if (TREE_CODE (*node) != FUNCTION_DECL
3909 && TREE_CODE (*node) != METHOD_TYPE)
3911 warning (0, "%qE attribute only applies to functions", name);
3912 *no_add = true;
3914 return NULL_TREE;
/* Attribute handler for "interrupt": valid only on an uninlinable
   function declared as "void f (void)".  */
static tree
mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
			int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  tree function_type;

  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (0, "%qE attribute only applies to functions", name);
      *no_add = true;
      return NULL_TREE;
    }

  /* Interrupt handlers cannot be inlined into normal callers.  */
  if (DECL_DECLARED_INLINE_P (*node))
    error ("cannot inline interrupt function %qE", DECL_NAME (*node));
  DECL_UNINLINABLE (*node) = 1;

  function_type = TREE_TYPE (*node);

  if (TREE_TYPE (function_type) != void_type_node)
    error ("interrupt function must have return type of void");

  /* A prototyped handler must be "(void)": exactly one argument entry
     and it must be void_type_node.  */
  if (prototype_p (function_type)
      && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
	  || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
    error ("interrupt function must have no arguments");

  return NULL_TREE;
}
/* Attribute handler for "io" and "cb": valid on variables, with an
   optional integer-constant address argument.  Unless -mio-volatile
   is disabled, the variable is also marked volatile.  */
static tree
mep_validate_io_cb (tree *node, tree name, tree args,
		    int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }

  if (args != NULL_TREE)
    {
      /* Strip a NON_LVALUE_EXPR wrapper so a constant expression is
	 accepted.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
	TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
	{
	  warning (0, "%qE attribute allows only an integer constant argument",
		   name);
	  *no_add = true;
	}
    }

  if (*no_add == false && !TARGET_IO_NO_VOLATILE)
    TREE_THIS_VOLATILE (*node) = 1;

  return NULL_TREE;
}
/* Attribute handler for "vliw": valid only on function types/decls.
   Misuse on pointer or array types gets a once-per-compilation hint
   showing the correct syntax.  */
static tree
mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
		   int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      /* Each hint is printed at most once per run.  */
      static int gave_pointer_note = 0;
      static int gave_array_note = 0;
      static const char * given_type = NULL;

      given_type = get_tree_code_name (TREE_CODE (*node));
      if (TREE_CODE (*node) == POINTER_TYPE)
	given_type = "pointers";
      if (TREE_CODE (*node) == ARRAY_TYPE)
	given_type = "arrays";

      if (given_type)
	warning (0, "%qE attribute only applies to functions, not %s",
		 name, given_type);
      else
	warning (0, "%qE attribute only applies to functions",
		 name);
      *no_add = true;

      if (TREE_CODE (*node) == POINTER_TYPE
	  && !gave_pointer_note)
	{
	  inform (input_location,
		  "to describe a pointer to a VLIW function, use syntax like this:\n%s",
		  " typedef int (__vliw *vfuncptr) ();");
	  gave_pointer_note = 1;
	}

      if (TREE_CODE (*node) == ARRAY_TYPE
	  && !gave_array_note)
	{
	  inform (input_location,
		  "to describe an array of VLIW function pointers, use syntax like this:\n%s",
		  " typedef int (__vliw *vfuncptr[]) ();");
	  gave_array_note = 1;
	}
    }

  if (!TARGET_VLIW)
    error ("VLIW functions are not allowed without a VLIW configuration");
  return NULL_TREE;
}
/* MeP attribute table; the NULL entry terminates it.  */
static const struct attribute_spec mep_attribute_table[11] =
{
  /* name         min max decl   type   func   handler
     affects_type_identity */
  { "based",        0, 0, false, false, false, mep_validate_based_tiny, false },
  { "tiny",         0, 0, false, false, false, mep_validate_based_tiny, false },
  { "near",         0, 0, false, false, false, mep_validate_near_far, false },
  { "far",          0, 0, false, false, false, mep_validate_near_far, false },
  { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt,
    false },
  { "interrupt",    0, 0, false, false, false, mep_validate_interrupt, false },
  { "io",           0, 1, false, false, false, mep_validate_io_cb, false },
  { "cb",           0, 1, false, false, false, mep_validate_io_cb, false },
  { "vliw",         0, 0, false, true,  false, mep_validate_vliw, false },
  { NULL,           0, 0, false, false, false, NULL, false }
};
4043 static bool
4044 mep_function_attribute_inlinable_p (const_tree callee)
4046 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4047 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4048 return (lookup_attribute ("disinterrupt", attrs) == 0
4049 && lookup_attribute ("interrupt", attrs) == 0);
4052 static bool
4053 mep_can_inline_p (tree caller, tree callee)
4055 if (TREE_CODE (callee) == ADDR_EXPR)
4056 callee = TREE_OPERAND (callee, 0);
4058 if (!mep_vliw_function_p (caller)
4059 && mep_vliw_function_p (callee))
4061 return false;
4063 return true;
/* Bit flags for the pragmas a function name can be tagged with.  */
#define FUNC_CALL 1
#define FUNC_DISINTERRUPT 2

/* Per-function-name record: FLAG holds the pragma kinds seen, USED
   the kinds that were actually consulted.  */
struct GTY(()) pragma_entry {
  int used;
  int flag;
};

/* Hash table mapping pragma-tagged function names to their
   pragma_entry.  Created lazily by mep_note_pragma_flag.  */
static GTY(()) hash_map<nofree_string_hash, pragma_entry> *pragma_htab;
4078 static void
4079 mep_note_pragma_flag (const char *funcname, int flag)
4081 if (!pragma_htab)
4082 pragma_htab = hash_map<nofree_string_hash, pragma_entry>::create_ggc (31);
4084 bool existed;
4085 const char *name = ggc_strdup (funcname);
4086 pragma_entry *slot = &pragma_htab->get_or_insert (name, &existed);
4087 if (!existed)
4089 slot->flag = 0;
4090 slot->used = 0;
4092 slot->flag |= flag;
4095 static bool
4096 mep_lookup_pragma_flag (const char *funcname, int flag)
4098 if (!pragma_htab)
4099 return false;
4101 if (funcname[0] == '@' && funcname[2] == '.')
4102 funcname += 3;
4104 pragma_entry *slot = pragma_htab->get (funcname);
4105 if (slot && (slot->flag & flag))
4107 slot->used |= flag;
4108 return true;
4110 return false;
/* True if FUNCNAME appeared in a "#pragma call".  */
bool
mep_lookup_pragma_call (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_CALL);
}

/* Record a "#pragma call" naming FUNCNAME.  */
void
mep_note_pragma_call (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_CALL);
}

/* True if FUNCNAME appeared in a "#pragma disinterrupt".  */
bool
mep_lookup_pragma_disinterrupt (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
}

/* Record a "#pragma disinterrupt" naming FUNCNAME.  */
void
mep_note_pragma_disinterrupt (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
}
/* Traversal callback: warn about a "#pragma disinterrupt" for S that
   was never matched against a function.  Always returns 1 so the
   traversal continues.  */
bool
note_unused_pragma_disinterrupt (const char *const &s, const pragma_entry &e,
				 void *)
{
  if ((e.flag & FUNC_DISINTERRUPT)
      && !(e.used & FUNC_DISINTERRUPT))
    warning (0, "\"#pragma disinterrupt %s\" not used", s);
  return 1;
}
/* End-of-file hook: diagnose unused "#pragma disinterrupt" entries.  */
void
mep_file_cleanups (void)
{
  if (pragma_htab)
    pragma_htab->traverse<void *, note_unused_pragma_disinterrupt> (NULL);
}
/* These three functions provide a bridge between the pragmas that
   affect register classes, and the functions that maintain them.  We
   can't call those functions directly as pragma handling is part of
   the front end and doesn't have direct access to them.  */

void
mep_save_register_info (void)
{
  save_register_info ();
}

void
mep_reinit_regs (void)
{
  reinit_regs ();
}

void
mep_init_regs (void)
{
  init_regs ();
}
/* Map the first address-region attribute in LIST to its one-letter
   section encoding: 'b'ased, 't'iny, 'n'ear, 'f'ar, 'i'o (in-range
   address), 'I' (io without a valid address), or 'c'b.  Returns 0 if
   no such attribute is present (but 'f' for functions when -mtf is in
   effect and no section was given).  Duplicate region attributes are
   diagnosed and trimmed to the first.  */
static int
mep_attrlist_to_encoding (tree list, tree decl)
{
  if (mep_multiple_address_regions (list, false) > 1)
    {
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       TREE_PURPOSE (TREE_CHAIN (list)),
	       DECL_NAME (decl),
	       DECL_SOURCE_LINE (decl));
      TREE_CHAIN (list) = NULL_TREE;
    }

  while (list)
    {
      if (is_attribute_p ("based", TREE_PURPOSE (list)))
	return 'b';
      if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
	return 't';
      if (is_attribute_p ("near", TREE_PURPOSE (list)))
	return 'n';
      if (is_attribute_p ("far", TREE_PURPOSE (list)))
	return 'f';
      if (is_attribute_p ("io", TREE_PURPOSE (list)))
	{
	  /* 'i' only when the io attribute carries an in-range
	     constant address; otherwise 'I'.  */
	  if (TREE_VALUE (list)
	      && TREE_VALUE (TREE_VALUE (list))
	      && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
	      if (location >= 0
		  && location <= 0x1000000)
		return 'i';
	    }
	  return 'I';
	}
      if (is_attribute_p ("cb", TREE_PURPOSE (list)))
	return 'c';
      list = TREE_CHAIN (list);
    }

  /* -mtf: functions without an explicit section default to far.  */
  if (TARGET_TF
      && TREE_CODE (decl) == FUNCTION_DECL
      && DECL_SECTION_NAME (decl) == 0)
    return 'f';
  return 0;
}
4225 static int
4226 mep_comp_type_attributes (const_tree t1, const_tree t2)
4228 int vliw1, vliw2;
4230 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4231 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4233 if (vliw1 != vliw2)
4234 return 0;
4236 return 1;
/* Implement TARGET_INSERT_ATTRIBUTES.  Adds "disinterrupt" to
   functions named by a matching pragma, then assigns global/static
   variables a default address-region attribute (based/tiny/far) from
   the size cutoffs, -mrand-tpgp hashing, or -mconst-section - unless
   the user already specified a region or section.  Also diagnoses two
   __io variables sharing the same address.  */
static void
mep_insert_attributes (tree decl, tree *attributes)
{
  int size;
  const char *secname = 0;
  tree attrib, attrlist;
  char encoding;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));

      if (mep_lookup_pragma_disinterrupt (funcname))
	{
	  attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
	  *attributes = chainon (*attributes, attrib);
	}
    }

  /* Only global/static/external variables get default regions.  */
  if (TREE_CODE (decl) != VAR_DECL
      || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
    return;

  if (TREE_READONLY (decl) && TARGET_DC)
    /* -mdc means that const variables default to the near section,
       regardless of the size cutoff.  */
    return;

  /* User specified an attribute, so override the default.
     Ignore storage attribute of pointed to variable. char __far * x;  */
  if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
    {
      if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
	TYPE_ATTRIBUTES (decl) = NULL_TREE;
      else if (DECL_ATTRIBUTES (decl) && *attributes)
	DECL_ATTRIBUTES (decl) = NULL_TREE;
    }

  attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
  encoding = mep_attrlist_to_encoding (attrlist, decl);
  if (!encoding && TYPE_P (TREE_TYPE (decl)))
    {
      attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
      encoding = mep_attrlist_to_encoding (attrlist, decl);
    }
  if (encoding)
    {
      /* This means that the declaration has a specific section
	 attribute, so we should not apply the default rules.  */

      if (encoding == 'i' || encoding == 'I')
	{
	  tree attr = lookup_attribute ("io", attrlist);
	  if (attr
	      && TREE_VALUE (attr)
	      && TREE_VALUE (TREE_VALUE(attr)))
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
	      static tree previous_value = 0;
	      static int previous_location = 0;
	      static tree previous_name = 0;

	      /* We take advantage of the fact that gcc will reuse the
		 same tree pointer when applying an attribute to a
		 list of decls, but produce a new tree for attributes
		 on separate source lines, even when they're textually
		 identical.  This is the behavior we want.  */
	      if (TREE_VALUE (attr) == previous_value
		  && location == previous_location)
		warning(0, "__io address 0x%x is the same for %qE and %qE",
			location, previous_name, DECL_NAME (decl));
	      previous_name = DECL_NAME (decl);
	      previous_location = location;
	      previous_value = TREE_VALUE (attr);
	    }
	}
      return;
    }

  /* Declarations of arrays can change size.  Don't trust them.  */
  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
    size = 0;
  else
    size = int_size_in_bytes (TREE_TYPE (decl));

  if (TARGET_RAND_TPGP && size <= 4 && size > 0)
    {
      /* -mrand-tpgp: spread small variables across sections by a hash
	 of the name.  */
      if (TREE_PUBLIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_STATIC (decl))
	{
	  const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
	  int key = 0;

	  while (*name)
	    key += *name++;

	  switch (key & 3)
	    {
	    case 0:
	      secname = "based";
	      break;
	    case 1:
	      secname = "tiny";
	      break;
	    case 2:
	      secname = "far";
	      break;
	    default:
	      ;
	    }
	}
    }
  else
    {
      if (size <= mep_based_cutoff && size > 0)
	secname = "based";
      else if (size <= mep_tiny_cutoff && size > 0)
	secname = "tiny";
      else if (TARGET_L)
	secname = "far";
    }

  if (mep_const_section && TREE_READONLY (decl))
    {
      if (strcmp (mep_const_section, "tiny") == 0)
	secname = "tiny";
      else if (strcmp (mep_const_section, "near") == 0)
	return;
      else if (strcmp (mep_const_section, "far") == 0)
	secname = "far";
    }

  if (!secname)
    return;

  if (!mep_multiple_address_regions (*attributes, true)
      && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
    {
      attrib = build_tree_list (get_identifier (secname), NULL_TREE);

      /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
	 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
	 and mep_validate_based_tiny.  */
      DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
    }
}
/* Implement TARGET_ENCODE_SECTION_INFO.  On the first call for DECL,
   prepend the "@x." region encoding (from its attributes) to the
   symbol name, and warn if the object is too large for the encoded
   section.  */
static void
mep_encode_section_info (tree decl, rtx rtl, int first)
{
  rtx rtlname;
  const char *oldname;
  const char *secname;
  char encoding;
  char *newname;
  tree idp;
  int maxsize;
  tree type;
  tree mep_attributes;

  if (! first)
    return;

  if (TREE_CODE (decl) != VAR_DECL
      && TREE_CODE (decl) != FUNCTION_DECL)
    return;

  /* Extract the current symbol name from the decl's rtl.  */
  rtlname = XEXP (rtl, 0);
  if (GET_CODE (rtlname) == SYMBOL_REF)
    oldname = XSTR (rtlname, 0);
  else if (GET_CODE (rtlname) == MEM
	   && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
    oldname = XSTR (XEXP (rtlname, 0), 0);
  else
    gcc_unreachable ();

  type = TREE_TYPE (decl);
  if (type == error_mark_node)
    return;
  mep_attributes = MEP_ATTRIBUTES (decl);

  encoding = mep_attrlist_to_encoding (mep_attributes, decl);

  if (encoding)
    {
      /* Rewrite the symbol as "@<encoding>.<oldname>" and preserve
	 the weak flag and decl linkage on the new SYMBOL_REF.  */
      newname = (char *) alloca (strlen (oldname) + 4);
      sprintf (newname, "@%c.%s", encoding, oldname);
      idp = get_identifier (newname);
      XEXP (rtl, 0) =
	gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
      SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
      SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);

      /* Size limits of the addressable sections.  */
      switch (encoding)
	{
	case 'b':
	  maxsize = 128;
	  secname = "based";
	  break;
	case 't':
	  maxsize = 65536;
	  secname = "tiny";
	  break;
	case 'n':
	  maxsize = 0x1000000;
	  secname = "near";
	  break;
	default:
	  maxsize = 0;
	  secname = 0;
	  break;
	}
      if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
	{
	  warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
		   oldname,
		   (long) int_size_in_bytes (TREE_TYPE (decl)),
		   secname,
		   maxsize);
	}
    }
}
/* Implement TARGET_STRIP_NAME_ENCODING: skip any leading '*'
   assembler markers and "@x." region-encoding prefixes, in any
   order, and return the bare name.  */
const char *
mep_strip_name_encoding (const char *sym)
{
  while (*sym == '*' || (*sym == '@' && sym[2] == '.'))
    sym += (*sym == '*') ? 1 : 3;
  return sym;
}
/* Implement TARGET_ASM_SELECT_SECTION.  Routes functions to the
   (vliw/far) text sections and variables to the section matching
   their "@x." name encoding; io/cb variables may not be placed (they
   must be uninitialized).  */
static section *
mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
		    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  int readonly = 1;
  int encoding;

  /* Decide whether DECL can live in a read-only section.  */
  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (!TREE_READONLY (decl)
	  || TREE_SIDE_EFFECTS (decl)
	  || !DECL_INITIAL (decl)
	  || (DECL_INITIAL (decl) != error_mark_node
	      && !TREE_CONSTANT (DECL_INITIAL (decl))))
	readonly = 0;
      break;
    case CONSTRUCTOR:
      if (! TREE_CONSTANT (decl))
	readonly = 0;
      break;

    default:
      break;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	encoding = name[1];
      else
	encoding = 0;

      if (flag_function_sections || DECL_COMDAT_GROUP (decl))
	mep_unique_section (decl, 0);
      else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	{
	  if (encoding == 'f')
	    return vftext_section;
	  else
	    return vtext_section;
	}
      else if (encoding == 'f')
	return ftext_section;
      else
	return text_section;
    }

  if (TREE_CODE (decl) == VAR_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	switch (name[1])
	  {
	  case 'b':
	    return based_section;

	  case 't':
	    if (readonly)
	      return srodata_section;
	    if (DECL_INITIAL (decl))
	      return sdata_section;
	    return tinybss_section;

	  case 'f':
	    if (readonly)
	      return frodata_section;
	    return far_section;

	  case 'i':
	  case 'I':
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<io%> must be uninitialized", decl);
	    return data_section;

	  case 'c':
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<cb%> must be uninitialized", decl);
	    return data_section;
	  }
    }

  if (readonly)
    return readonly_data_section;

  return data_section;
}
/* Implement TARGET_ASM_UNIQUE_SECTION.  Build a per-decl section name
   "<prefix><symbol>", choosing the prefix from the section kind
   (text/rodata/data, their based/tiny/far variants, and vliw text),
   with linkonce prefixes for COMDAT decls.  */
static void
mep_unique_section (tree decl, int reloc)
{
  /* [section-kind][0 = normal, 1 = COMDAT/linkonce] prefixes; the
     indices match the "sec" codes assigned below.  */
  static const char *prefixes[][2] =
  {
    { ".text.", ".gnu.linkonce.t." },
    { ".rodata.", ".gnu.linkonce.r." },
    { ".data.", ".gnu.linkonce.d." },
    { ".based.", ".gnu.linkonce.based." },
    { ".sdata.", ".gnu.linkonce.s." },
    { ".far.", ".gnu.linkonce.far." },
    { ".ftext.", ".gnu.linkonce.ft." },
    { ".frodata.", ".gnu.linkonce.frd." },
    { ".srodata.", ".gnu.linkonce.srd." },
    { ".vtext.", ".gnu.linkonce.v." },
    { ".vftext.", ".gnu.linkonce.vf." }
  };
  int sec = 2; /* .data */
  int len;
  const char *name, *prefix;
  char *string;

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  if (DECL_RTL (decl))
    name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	sec = 9; /* .vtext */
      else
	sec = 0; /* .text */
    }
  else if (decl_readonly_section (decl, reloc))
    sec = 1; /* .rodata */

  /* An "@x." name encoding refines the section choice.  */
  if (name[0] == '@' && name[2] == '.')
    {
      switch (name[1])
	{
	case 'b':
	  sec = 3; /* .based */
	  break;
	case 't':
	  if (sec == 1)
	    sec = 8; /* .srodata */
	  else
	    sec = 4; /* .sdata */
	  break;
	case 'f':
	  if (sec == 0)
	    sec = 6; /* .ftext */
	  else if (sec == 9)
	    sec = 10; /* .vftext */
	  else if (sec == 1)
	    sec = 7; /* .frodata */
	  else
	    sec = 5; /* .far.  */
	  break;
	}
      name += 3;
    }

  prefix = prefixes[sec][DECL_COMDAT_GROUP(decl) != NULL];
  len = strlen (name) + strlen (prefix);
  string = (char *) alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  set_decl_section_name (decl, string);
}
4643 /* Given a decl, a section name, and whether the decl initializer
4644 has relocs, choose attributes for the section. */
4646 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4648 static unsigned int
4649 mep_section_type_flags (tree decl, const char *name, int reloc)
4651 unsigned int flags = default_section_type_flags (decl, name, reloc);
4653 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4654 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4655 flags |= SECTION_MEP_VLIW;
4657 return flags;
4660 /* Switch to an arbitrary section NAME with attributes as specified
4661 by FLAGS. ALIGN specifies any known alignment requirements for
4662 the section; 0 if the default should be used.
4664 Differs from the standard ELF version only in support of VLIW mode. */
4666 static void
4667 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4669 char flagchars[8], *f = flagchars;
4670 const char *type;
4672 if (!(flags & SECTION_DEBUG))
4673 *f++ = 'a';
4674 if (flags & SECTION_WRITE)
4675 *f++ = 'w';
4676 if (flags & SECTION_CODE)
4677 *f++ = 'x';
4678 if (flags & SECTION_SMALL)
4679 *f++ = 's';
4680 if (flags & SECTION_MEP_VLIW)
4681 *f++ = 'v';
4682 *f = '\0';
4684 if (flags & SECTION_BSS)
4685 type = "nobits";
4686 else
4687 type = "progbits";
4689 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4690 name, flagchars, type);
4692 if (flags & SECTION_CODE)
4693 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4694 asm_out_file);
/* Output a common (uninitialized, possibly global) variable NAME of
   SIZE bytes with ALIGN bits of alignment to STREAM.  io/cb variables
   become absolute symbols at their configured address; based/tiny/far
   variables are emitted as zero-filled objects in their bss-like
   section; everything else falls through to .comm/.local.  */
void
mep_output_aligned_common (FILE *stream, tree decl, const char *name,
			   int size, int align, int global)
{
  /* We intentionally don't use mep_section_tag() here.  */
  if (name[0] == '@'
      && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
      && name[2] == '.')
    {
      /* io/cb: define the symbol as its absolute address, if one was
	 given; otherwise emit nothing.  */
      int location = -1;
      tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
				    DECL_ATTRIBUTES (decl));
      if (attr
	  && TREE_VALUE (attr)
	  && TREE_VALUE (TREE_VALUE(attr)))
	location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
      if (location == -1)
	return;
      if (global)
	{
	  fprintf (stream, "\t.globl\t");
	  assemble_name (stream, name);
	  fprintf (stream, "\n");
	}
      assemble_name (stream, name);
      fprintf (stream, " = %d\n", location);
      return;
    }
  if (name[0] == '@' && name[2] == '.')
    {
      const char *sec = 0;
      switch (name[1])
	{
	case 'b':
	  switch_to_section (based_section);
	  sec = ".based";
	  break;
	case 't':
	  switch_to_section (tinybss_section);
	  sec = ".sbss";
	  break;
	case 'f':
	  switch_to_section (farbss_section);
	  sec = ".farbss";
	  break;
	}
      if (sec)
	{
	  const char *name2;
	  int p2align = 0;

	  /* Convert the bit alignment into a power of two.  */
	  while (align > BITS_PER_UNIT)
	    {
	      align /= 2;
	      p2align ++;
	    }
	  name2 = targetm.strip_name_encoding (name);
	  if (global)
	    fprintf (stream, "\t.globl\t%s\n", name2);
	  fprintf (stream, "\t.p2align %d\n", p2align);
	  fprintf (stream, "\t.type\t%s,@object\n", name2);
	  fprintf (stream, "\t.size\t%s,%d\n", name2, size);
	  fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
	  return;
	}
    }

  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
/* Trampolines.  */

/* Implement TARGET_TRAMPOLINE_INIT by calling the run-time helper
   __mep_trampoline_helper with the trampoline address, the target
   function address, and the static chain value.  */
static void
mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx addr = XEXP (m_tramp, 0);
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
		     LCT_NORMAL, VOIDmode, 3,
		     addr, Pmode,
		     fnaddr, Pmode,
		     static_chain, Pmode);
}
/* Experimental Reorg.  */

/* Return true if IN mentions REG (a REG rtx), or - when REG is NULL -
   whether IN contains any MEM or LABEL_REF.  When MODES_TOO is
   nonzero a REG match additionally requires equal machine modes.
   Recurses through the whole rtx structure.  */
static bool
mep_mentioned_p (rtx in,
		 rtx reg, /* NULL for mem */
		 int modes_too) /* if nonzero, modes must match also.  */
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return false;
  if (reg && GET_CODE (reg) != REG)
    return false;

  if (GET_CODE (in) == LABEL_REF)
    return (reg == 0);

  code = GET_CODE (in);

  switch (code)
    {
    case MEM:
      /* With a REG to look for, search inside the address; otherwise
	 any MEM counts as a hit.  */
      if (reg)
	return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
      return true;

    case REG:
      if (!reg)
	return false;
      if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
	return false;
      return (REGNO (in) == REGNO (reg));

    case SCRATCH:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
      /* Leaf codes that can never contain a match.  */
      return false;

    default:
      break;
    }

  /* Set's source should be read-only.  */
  if (code == SET && !reg)
    return mep_mentioned_p (SET_DEST (in), reg, modes_too);

  fmt = GET_RTX_FORMAT (code);

  /* Generic walk over the operands, including rtx vectors.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	    if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
	      return true;
	}
      else if (fmt[i] == 'e'
	       && mep_mentioned_p (XEXP (in, i), reg, modes_too))
	return true;
    }
  return false;
}
4858 #define EXPERIMENTAL_REGMOVE_REORG 1
4860 #if EXPERIMENTAL_REGMOVE_REORG
/* Return nonzero if registers R1 and R2 belong to the same replaceable
   class: both general registers or both coprocessor registers.  */

static int
mep_compatible_reg_class (int r1, int r2)
{
  return ((GR_REGNO_P (r1) && GR_REGNO_P (r2))
	  || (CR_REGNO_P (r1) && CR_REGNO_P (r2)));
}
4872 static void
4873 mep_reorg_regmove (rtx_insn *insns)
4875 rtx_insn *insn, *next, *follow;
4876 rtx pat, *where;
4877 int count = 0, done = 0, replace, before = 0;
4879 if (dump_file)
4880 for (insn = insns; insn; insn = NEXT_INSN (insn))
4881 if (NONJUMP_INSN_P (insn))
4882 before++;
4884 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4885 set that uses the r2 and r2 dies there. We replace r2 with r1
4886 and see if it's still a valid insn. If so, delete the first set.
4887 Copied from reorg.c. */
4889 while (!done)
4891 done = 1;
4892 for (insn = insns; insn; insn = next)
4894 next = next_nonnote_nondebug_insn (insn);
4895 if (! NONJUMP_INSN_P (insn))
4896 continue;
4897 pat = PATTERN (insn);
4899 replace = 0;
4901 if (GET_CODE (pat) == SET
4902 && GET_CODE (SET_SRC (pat)) == REG
4903 && GET_CODE (SET_DEST (pat)) == REG
4904 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4905 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4907 follow = next_nonnote_nondebug_insn (insn);
4908 if (dump_file)
4909 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
4911 while (follow && NONJUMP_INSN_P (follow)
4912 && GET_CODE (PATTERN (follow)) == SET
4913 && !dead_or_set_p (follow, SET_SRC (pat))
4914 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
4915 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
4917 if (dump_file)
4918 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
4919 follow = next_nonnote_insn (follow);
4922 if (dump_file)
4923 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
4924 if (follow && NONJUMP_INSN_P (follow)
4925 && GET_CODE (PATTERN (follow)) == SET
4926 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
4928 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
4930 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
4932 replace = 1;
4933 where = & SET_SRC (PATTERN (follow));
4936 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
4938 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
4940 replace = 1;
4941 where = & PATTERN (follow);
4947 /* If so, follow is the corresponding insn */
4948 if (replace)
4950 if (dump_file)
4952 rtx_insn *x;
4954 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
4955 for (x = insn; x ;x = NEXT_INSN (x))
4957 print_rtl_single (dump_file, x);
4958 if (x == follow)
4959 break;
4960 fprintf (dump_file, "\n");
4964 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
4965 follow, where))
4967 count ++;
4968 delete_insn (insn);
4969 if (dump_file)
4971 fprintf (dump_file, "\n----- Success! new insn:\n\n");
4972 print_rtl_single (dump_file, follow);
4974 done = 0;
4980 if (dump_file)
4982 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
4983 fprintf (dump_file, "=====\n");
4986 #endif
/* Figure out where to put LABEL, which is the label for a repeat loop.
   If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
   the loop ends just before LAST_INSN.  If SHARED, insns other than the
   "repeat" might use LABEL to jump to the loop's continuation point.

   The repeat hardware requires the last two instruction slots of the
   loop to be filled; this routine pulls up to two eligible trailing
   insns into that "repeat epilogue" and pads with nops otherwise.

   Return the last instruction in the adjusted loop.  */

static rtx_insn *
mep_insert_repeat_label_last (rtx_insn *last_insn, rtx_code_label *label,
			      bool including, bool shared)
{
  rtx_insn *next, *prev;
  int count = 0, code, icode;

  if (dump_file)
    fprintf (dump_file, "considering end of repeat loop at insn %d\n",
	     INSN_UID (last_insn));

  /* Set PREV to the last insn in the loop.  */
  prev = last_insn;
  if (!including)
    prev = PREV_INSN (prev);

  /* Set NEXT to the next insn after the repeat label.  */
  next = last_insn;
  if (!shared)
    /* When the label is private to the repeat, walk backwards looking
       for up to two insns that may live in the repeat epilogue.  */
    while (prev != 0)
      {
	code = GET_CODE (prev);
	/* Never move the epilogue boundary across a call, label or
	   barrier.  */
	if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
	  break;

	if (INSN_P (prev))
	  {
	    /* For a bundled (SEQUENCE) insn, inspect its second
	       element.  */
	    if (GET_CODE (PATTERN (prev)) == SEQUENCE)
	      prev = as_a <rtx_insn *> (XVECEXP (PATTERN (prev), 0, 1));

	    /* Other insns that should not be in the last two opcodes.  */
	    icode = recog_memoized (prev);
	    if (icode < 0
		|| icode == CODE_FOR_repeat
		|| icode == CODE_FOR_erepeat
		|| get_attr_may_trap (prev) == MAY_TRAP_YES)
	      break;

	    /* That leaves JUMP_INSN and INSN.  It will have BImode if it
	       is the second instruction in a VLIW bundle.  In that case,
	       loop again: if the first instruction also satisfies the
	       conditions above then we will reach here again and put
	       both of them into the repeat epilogue.  Otherwise both
	       should remain outside.  */
	    if (GET_MODE (prev) != BImode)
	      {
		count++;
		next = prev;
		if (dump_file)
		  print_rtl_single (dump_file, next);
		if (count == 2)
		  break;
	      }
	  }
	prev = PREV_INSN (prev);
      }

  /* See if we're adding the label immediately after the repeat insn.
     If so, we need to separate them with a nop.  */
  prev = prev_real_insn (next);
  if (prev)
    switch (recog_memoized (prev))
      {
      case CODE_FOR_repeat:
      case CODE_FOR_erepeat:
	if (dump_file)
	  fprintf (dump_file, "Adding nop inside loop\n");
	emit_insn_before (gen_nop (), next);
	break;

      default:
	break;
      }

  /* Insert the label.  */
  emit_label_before (label, next);

  /* Insert the nops.  Fewer than two epilogue insns were found, so pad
     the epilogue up to the required two slots.  */
  if (dump_file && count < 2)
    fprintf (dump_file, "Adding %d nop%s\n\n",
	     2 - count, count == 1 ? "" : "s");

  for (; count < 2; count++)
    if (including)
      last_insn = emit_insn_after (gen_nop (), last_insn);
    else
      emit_insn_before (gen_nop (), last_insn);

  return last_insn;
}
5088 void
5089 mep_emit_doloop (rtx *operands, int is_end)
5091 rtx tag;
5093 if (cfun->machine->doloop_tags == 0
5094 || cfun->machine->doloop_tag_from_end == is_end)
5096 cfun->machine->doloop_tags++;
5097 cfun->machine->doloop_tag_from_end = is_end;
5100 tag = GEN_INT (cfun->machine->doloop_tags - 1);
5101 if (is_end)
5102 emit_jump_insn (gen_doloop_end_internal (operands[0], operands[1], tag));
5103 else
5104 emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
5108 /* Code for converting doloop_begins and doloop_ends into valid
5109 MeP instructions. A doloop_begin is just a placeholder:
5111 $count = unspec ($count)
5113 where $count is initially the number of iterations - 1.
5114 doloop_end has the form:
5116 if ($count-- == 0) goto label
5118 The counter variable is private to the doloop insns, nothing else
5119 relies on its value.
5121 There are three cases, in decreasing order of preference:
5123 1. A loop has exactly one doloop_begin and one doloop_end.
5124 The doloop_end branches to the first instruction after
5125 the doloop_begin.
5127 In this case we can replace the doloop_begin with a repeat
5128 instruction and remove the doloop_end. I.e.:
5130 $count1 = unspec ($count1)
5131 label:
5133 insn1
5134 insn2
5135 if ($count2-- == 0) goto label
5137 becomes:
5139 repeat $count1,repeat_label
5140 label:
5142 repeat_label:
5143 insn1
5144 insn2
5145 # end repeat
5147 2. As for (1), except there are several doloop_ends. One of them
5148 (call it X) falls through to a label L. All the others fall
5149 through to branches to L.
5151 In this case, we remove X and replace the other doloop_ends
5152 with branches to the repeat label. For example:
5154 $count1 = unspec ($count1)
5155 start:
5157 if ($count2-- == 0) goto label
5158 end:
5160 if ($count3-- == 0) goto label
5161 goto end
5163 becomes:
5165 repeat $count1,repeat_label
5166 start:
5168 repeat_label:
5171 # end repeat
5172 end:
5174 goto repeat_label
5176 3. The fallback case. Replace doloop_begins with:
5178 $count = $count + 1
5180 Replace doloop_ends with the equivalent of:
5182 $count = $count - 1
5183 if ($count == 0) goto label
5185 Note that this might need a scratch register if $count
5186 is stored in memory. */
/* A structure describing one doloop_begin.  */
struct mep_doloop_begin {
  /* The next doloop_begin with the same tag.  */
  struct mep_doloop_begin *next;

  /* The instruction itself.  */
  rtx_insn *insn;

  /* The initial counter value.  This is known to be a general register.  */
  rtx counter;
};

/* A structure describing a doloop_end.  */
struct mep_doloop_end {
  /* The next doloop_end with the same loop tag.  */
  struct mep_doloop_end *next;

  /* The instruction itself.  */
  rtx_insn *insn;

  /* The first instruction after INSN when the branch isn't taken.  */
  rtx_insn *fallthrough;

  /* The location of the counter value.  Since doloop_end_internal is a
     jump instruction, it has to allow the counter to be stored anywhere
     (any non-fixed register or memory location).  */
  rtx counter;

  /* The target label (the place where the insn branches when the counter
     isn't zero).  */
  rtx label;

  /* A scratch register.  Only available when COUNTER isn't stored
     in a general register.  */
  rtx scratch;
};

/* One do-while loop.  */
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
5237 /* Return true if LOOP can be converted into repeat/repeat_end form
5238 (that is, if it matches cases (1) or (2) above). */
5240 static bool
5241 mep_repeat_loop_p (struct mep_doloop *loop)
5243 struct mep_doloop_end *end;
5244 rtx fallthrough;
5246 /* There must be exactly one doloop_begin and at least one doloop_end. */
5247 if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
5248 return false;
5250 /* The first doloop_end (X) must branch back to the insn after
5251 the doloop_begin. */
5252 if (prev_real_insn (loop->end->label) != loop->begin->insn)
5253 return false;
5255 /* All the other doloop_ends must branch to the same place as X.
5256 When the branch isn't taken, they must jump to the instruction
5257 after X. */
5258 fallthrough = loop->end->fallthrough;
5259 for (end = loop->end->next; end != 0; end = end->next)
5260 if (end->label != loop->end->label
5261 || !simplejump_p (end->fallthrough)
5262 || next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
5263 return false;
5265 return true;
/* The main repeat reorg function.  See comment above for details.  */

static void
mep_reorg_repeat (rtx_insn *insns)
{
  rtx_insn *insn;
  struct mep_doloop *loops, *loop;
  struct mep_doloop_begin *begin;
  struct mep_doloop_end *end;

  /* Quick exit if we haven't created any loops.  */
  if (cfun->machine->doloop_tags == 0)
    return;

  /* Create an array of mep_doloop structures, one per tag.  */
  loops = (struct mep_doloop *) alloca (sizeof (loops[0])
					* cfun->machine->doloop_tags);
  memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

  /* Search the function for do-while insns and group them by loop tag.
     Operand 2 of both patterns is the tag (see mep_emit_doloop).  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      switch (recog_memoized (insn))
	{
	case CODE_FOR_doloop_begin_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  begin = (struct mep_doloop_begin *)
	    alloca (sizeof (struct mep_doloop_begin));
	  begin->next = loop->begin;
	  begin->insn = insn;
	  begin->counter = recog_data.operand[0];

	  loop->begin = begin;
	  break;

	case CODE_FOR_doloop_end_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  end = (struct mep_doloop_end *)
	    alloca (sizeof (struct mep_doloop_end));
	  end->insn = insn;
	  end->fallthrough = next_real_insn (insn);
	  end->counter = recog_data.operand[0];
	  end->label = recog_data.operand[1];
	  end->scratch = recog_data.operand[3];

	  /* If this insn falls through to an unconditional jump,
	     give it a lower priority than the others.  */
	  if (loop->end != 0 && simplejump_p (end->fallthrough))
	    {
	      end->next = loop->end->next;
	      loop->end->next = end;
	    }
	  else
	    {
	      end->next = loop->end;
	      loop->end = end;
	    }
	  break;
	}

  /* Convert the insns for each loop in turn.  */
  for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
    if (mep_repeat_loop_p (loop))
      {
	/* Case (1) or (2).  */
	rtx_code_label *repeat_label;
	rtx label_ref;

	/* Create a new label for the repeat insn.  */
	repeat_label = gen_label_rtx ();

	/* Replace the doloop_begin with a repeat.  */
	label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
	emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
			  loop->begin->insn);
	delete_insn (loop->begin->insn);

	/* Insert the repeat label before the first doloop_end.
	   Fill the gap with nops if there are other doloop_ends.  */
	mep_insert_repeat_label_last (loop->end->insn, repeat_label,
				      false, loop->end->next != 0);

	/* Emit a repeat_end (to improve the readability of the output).  */
	emit_insn_before (gen_repeat_end (), loop->end->insn);

	/* Delete the first doloop_end.  */
	delete_insn (loop->end->insn);

	/* Replace the others with branches to REPEAT_LABEL.  */
	for (end = loop->end->next; end != 0; end = end->next)
	  {
	    emit_jump_insn_before (gen_jump (repeat_label), end->insn);
	    delete_insn (end->insn);
	    delete_insn (end->fallthrough);
	  }
      }
    else
      {
	/* Case (3).  First replace all the doloop_begins with increment
	   instructions.  */
	for (begin = loop->begin; begin != 0; begin = begin->next)
	  {
	    emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
					     begin->counter, const1_rtx),
			      begin->insn);
	    delete_insn (begin->insn);
	  }

	/* Replace all the doloop_ends with decrement-and-branch sequences.  */
	for (end = loop->end; end != 0; end = end->next)
	  {
	    rtx reg;

	    start_sequence ();

	    /* Load the counter value into a general register.  */
	    reg = end->counter;
	    if (!REG_P (reg) || REGNO (reg) > 15)
	      {
		reg = end->scratch;
		emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
	      }

	    /* Decrement the counter.  */
	    emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
				      constm1_rtx));

	    /* Copy it back to its original location.  */
	    if (reg != end->counter)
	      emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

	    /* Jump back to the start label.  */
	    insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
						     end->label));
	    JUMP_LABEL (insn) = end->label;
	    LABEL_NUSES (end->label)++;

	    /* Emit the whole sequence before the doloop_end.  */
	    insn = get_insns ();
	    end_sequence ();
	    emit_insn_before (insn, end->insn);

	    /* Delete the doloop_end.  */
	    delete_insn (end->insn);
	  }
      }
}
5419 static bool
5420 mep_invertable_branch_p (rtx_insn *insn)
5422 rtx cond, set;
5423 enum rtx_code old_code;
5424 int i;
5426 set = PATTERN (insn);
5427 if (GET_CODE (set) != SET)
5428 return false;
5429 if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
5430 return false;
5431 cond = XEXP (XEXP (set, 1), 0);
5432 old_code = GET_CODE (cond);
5433 switch (old_code)
5435 case EQ:
5436 PUT_CODE (cond, NE);
5437 break;
5438 case NE:
5439 PUT_CODE (cond, EQ);
5440 break;
5441 case LT:
5442 PUT_CODE (cond, GE);
5443 break;
5444 case GE:
5445 PUT_CODE (cond, LT);
5446 break;
5447 default:
5448 return false;
5450 INSN_CODE (insn) = -1;
5451 i = recog_memoized (insn);
5452 PUT_CODE (cond, old_code);
5453 INSN_CODE (insn) = -1;
5454 return i >= 0;
/* Invert the sense of the conditional branch INSN (EQ<->NE, LT<->GE)
   and retarget it at a fresh label emitted just after AFTER, so that
   the inverted branch skips the code between INSN and AFTER.  The old
   target label is deleted when INSN was its only user.  Aborts if the
   inverted insn no longer matches a pattern; callers are expected to
   have checked with mep_invertable_branch_p first.  */

static void
mep_invert_branch (rtx_insn *insn, rtx_insn *after)
{
  rtx cond, set, label;
  int i;

  set = PATTERN (insn);

  gcc_assert (GET_CODE (set) == SET);
  gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

  cond = XEXP (XEXP (set, 1), 0);
  switch (GET_CODE (cond))
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      gcc_unreachable ();
    }
  label = gen_label_rtx ();
  emit_label_after (label, after);
  /* Point whichever IF_THEN_ELSE arm holds the LABEL_REF at the new
     label, maintaining the use counts of both labels.  */
  for (i=1; i<=2; i++)
    if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
      {
	rtx ref = XEXP (XEXP (set, 1), i);
	if (LABEL_NUSES (XEXP (ref, 0)) == 1)
	  delete_insn (XEXP (ref, 0));
	XEXP (ref, 0) = label;
	LABEL_NUSES (label) ++;
	JUMP_LABEL (insn) = label;
      }
  /* Re-recognize the modified insn; the inversion must be valid.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  gcc_assert (i >= 0);
}
/* Scan for loops that end in an invertible conditional branch back to
   a loop-top label, and convert them to use the erepeat instruction:
   an erepeat is inserted after the loop-top label, the branch is
   removed (or inverted to jump past the loop), and the loop tail is
   rewritten via mep_insert_repeat_label_last.  */

static void
mep_reorg_erepeat (rtx_insn *insns)
{
  rtx_insn *insn, *prev;
  rtx_code_label *l;
  rtx x;
  int count;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	&& mep_invertable_branch_p (insn))
      {
	if (dump_file)
	  {
	    fprintf (dump_file, "\n------------------------------\n");
	    fprintf (dump_file, "erepeat: considering this jump:\n");
	    print_rtl_single (dump_file, insn);
	  }
	/* COUNT tracks the number of real insns in the loop body; an
	   unconditional jump itself contributes nothing, a conditional
	   branch remains as one insn.  */
	count = simplejump_p (insn) ? 0 : 1;
	/* Walk backwards from the branch looking for its target label.  */
	for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
	  {
	    /* Calls and barriers end the candidate region.  */
	    if (CALL_P (prev) || BARRIER_P (prev))
	      break;

	    if (prev == JUMP_LABEL (insn))
	      {
		rtx_insn *newlast;
		if (dump_file)
		  fprintf (dump_file, "found loop top, %d insns\n", count);

		if (LABEL_NUSES (prev) == 1)
		  /* We're the only user, always safe */ ;
		else if (LABEL_NUSES (prev) == 2)
		  {
		    /* See if there's a barrier before this label.  If
		       so, we know nobody inside the loop uses it.
		       But we must be careful to put the erepeat
		       *after* the label.  */
		    rtx_insn *barrier;
		    for (barrier = PREV_INSN (prev);
			 barrier && NOTE_P (barrier);
			 barrier = PREV_INSN (barrier))
		      ;
		    if (barrier && ! BARRIER_P (barrier))
		      break;
		  }
		else
		  {
		    /* We don't know who else, within or without our loop, uses this */
		    if (dump_file)
		      fprintf (dump_file, "... but there are multiple users, too risky.\n");
		    break;
		  }

		/* Generate a label to be used by the erepat insn.  */
		l = gen_label_rtx ();

		/* Insert the erepeat after INSN's target label.  */
		x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
		LABEL_NUSES (l)++;
		emit_insn_after (x, prev);

		/* Insert the erepeat label.  */
		newlast = (mep_insert_repeat_label_last
			   (insn, l, !simplejump_p (insn), false));
		if (simplejump_p (insn))
		  {
		    /* An unconditional loop-back branch is simply
		       replaced by the erepeat_end marker.  */
		    emit_insn_before (gen_erepeat_end (), insn);
		    delete_insn (insn);
		  }
		else
		  {
		    /* A conditional branch is inverted so that it
		       exits the loop instead of continuing it.  */
		    mep_invert_branch (insn, newlast);
		    emit_insn_after (gen_erepeat_end (), newlast);
		  }
		break;
	      }

	    if (LABEL_P (prev))
	      {
		/* A label is OK if there is exactly one user, and we
		   can find that user before the next label.  */
		rtx_insn *user = 0;
		int safe = 0;
		if (LABEL_NUSES (prev) == 1)
		  {
		    for (user = PREV_INSN (prev);
			 user && (INSN_P (user) || NOTE_P (user));
			 user = PREV_INSN (user))
		      if (JUMP_P (user) && JUMP_LABEL (user) == prev)
			{
			  safe = INSN_UID (user);
			  break;
			}
		  }
		if (!safe)
		  break;
		if (dump_file)
		  fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
			   safe, INSN_UID (prev));
	      }

	    if (INSN_P (prev))
	      {
		count ++;
	      }
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n==============================\n");
}
/* Replace a jump to a return, with a copy of the return.  GCC doesn't
   always do this on its own.  */

static void
mep_jmp_return_reorg (rtx_insn *insns)
{
  rtx_insn *insn, *label, *ret;
  int ret_code;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (simplejump_p (insn))
      {
	/* Find the first real insn the jump jumps to.  */
	label = ret = safe_as_a <rtx_insn *> (JUMP_LABEL (insn));
	while (ret
	       && (NOTE_P (ret)
		   || LABEL_P (ret)
		   || GET_CODE (PATTERN (ret)) == USE))
	  ret = NEXT_INSN (ret);

	if (ret)
	  {
	    /* Is it a return?  */
	    ret_code = recog_memoized (ret);
	    if (ret_code == CODE_FOR_return_internal
		|| ret_code == CODE_FOR_eh_return_internal)
	      {
		/* It is.  Replace the jump with a return, dropping the
		   target label if this was its last user.  */
		LABEL_NUSES (label) --;
		if (LABEL_NUSES (label) == 0)
		  delete_insn (label);
		PATTERN (insn) = copy_rtx (PATTERN (ret));
		INSN_CODE (insn) = -1;
	      }
	  }
      }
}
/* Combine adjacent "reg = reg + const" insns that target the same
   register into a single add when the combined constant still fits in
   a signed 16-bit immediate.  The second insn is unlinked from the
   insn chain directly rather than via delete_insn.
   NOTE(review): NEXT_INSN (i) is passed to INSN_P without a null
   check, and the range test uses "< 32767" rather than "<= 32767" —
   confirm both are intentional.  */

static void
mep_reorg_addcombine (rtx_insn *insns)
{
  rtx_insn *i, *n;

  for (i = insns; i; i = NEXT_INSN (i))
    if (INSN_P (i)
	&& INSN_CODE (i) == CODE_FOR_addsi3
	&& GET_CODE (SET_DEST (PATTERN (i))) == REG
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
	&& REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
      {
	n = NEXT_INSN (i);
	if (INSN_P (n)
	    && INSN_CODE (n) == CODE_FOR_addsi3
	    && GET_CODE (SET_DEST (PATTERN (n))) == REG
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
	    && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
	  {
	    int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
	    int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
	    if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
		&& ic + nc < 32767
		&& ic + nc > -32768)
	      {
		/* Fold N's constant into I, then splice N out of the
		   doubly-linked insn chain.  */
		XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
		SET_NEXT_INSN (i) = NEXT_INSN (n);
		if (NEXT_INSN (i))
		  SET_PREV_INSN (NEXT_INSN (i)) = i;
	      }
	  }
      }
}
5690 /* If this insn adjusts the stack, return the adjustment, else return
5691 zero. */
5692 static int
5693 add_sp_insn_p (rtx_insn *insn)
5695 rtx pat;
5697 if (! single_set (insn))
5698 return 0;
5699 pat = PATTERN (insn);
5700 if (GET_CODE (SET_DEST (pat)) != REG)
5701 return 0;
5702 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5703 return 0;
5704 if (GET_CODE (SET_SRC (pat)) != PLUS)
5705 return 0;
5706 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5707 return 0;
5708 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5709 return 0;
5710 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5711 return 0;
5712 return INTVAL (XEXP (SET_SRC (pat), 1));
/* Check for trivial functions that set up an unneeded stack
   frame: a leading $sp -= N and a trailing $sp += N with no use of
   $sp and no call in between.  When found, delete both adjustments.  */
static void
mep_reorg_noframe (rtx_insn *insns)
{
  rtx_insn *start_frame_insn;
  rtx_insn *end_frame_insn = 0;
  int sp_adjust, sp2;
  rtx sp;

  /* The first insn should be $sp = $sp + N */
  while (insns && ! INSN_P (insns))
    insns = NEXT_INSN (insns);
  if (!insns)
    return;

  sp_adjust = add_sp_insn_p (insns);
  if (sp_adjust == 0)
    return;

  start_frame_insn = insns;
  sp = SET_DEST (PATTERN (start_frame_insn));

  insns = next_real_insn (insns);

  while (insns)
    {
      rtx_insn *next = next_real_insn (insns);
      /* The final real insn is the function's return; stop there so it
	 is never treated as part of the frame.  */
      if (!next)
	break;

      sp2 = add_sp_insn_p (insns);
      if (sp2)
	{
	  /* Bail out on a second adjustment, or one that doesn't
	     exactly undo the prologue's.  */
	  if (end_frame_insn)
	    return;
	  end_frame_insn = insns;
	  if (sp2 != -sp_adjust)
	    return;
	}
      else if (mep_mentioned_p (insns, sp, 0))
	return;
      else if (CALL_P (insns))
	return;

      insns = next;
    }

  if (end_frame_insn)
    {
      delete_insn (start_frame_insn);
      delete_insn (end_frame_insn);
    }
}
/* Implement TARGET_MACHINE_DEPENDENT_REORG.  The pass order below is
   load-bearing: regmove and jump/return cleanup run before bundling,
   repeat conversion runs after bundling, and the frame-deletion pass
   runs last because it may delete the insns others have pointers to.  */
static void
mep_reorg (void)
{
  rtx_insn *insns = get_insns ();

  /* We require accurate REG_DEAD notes.  */
  compute_bb_for_insn ();
  df_note_add_problem ();
  df_analyze ();

  mep_reorg_addcombine (insns);
#if EXPERIMENTAL_REGMOVE_REORG
  /* VLIW packing has been done already, so we can't just delete things.  */
  if (!mep_vliw_function_p (cfun->decl))
    mep_reorg_regmove (insns);
#endif
  mep_jmp_return_reorg (insns);
  mep_bundle_insns (insns);
  mep_reorg_repeat (insns);
  /* erepeat uses the RPB register, so it is only safe when profiling is
     off and, in interrupt handlers, when RPB is saved/restored.  */
  if (optimize
      && !profile_flag
      && !profile_arc_flag
      && TARGET_OPT_REPEAT
      && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
    mep_reorg_erepeat (insns);

  /* This may delete *insns so make sure it's last.  */
  mep_reorg_noframe (insns);

  df_finish_pass (false);
}
/*----------------------------------------------------------------------*/
/* Builtins								*/
/*----------------------------------------------------------------------*/

/* Element X gives the index into cgen_insns[] of the most general
   implementation of intrinsic X.  Unimplemented intrinsics are
   mapped to -1.  */
int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

/* Element X gives the index of another instruction that is mapped to
   the same intrinsic as cgen_insns[X].  It is -1 when there is no other
   instruction.

   Things are set up so that mep_intrinsic_chain[X] < X.  */
static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

/* The bitmask for the current ISA.  The ISA masks are declared
   in mep-intrin.h.  */
unsigned int mep_selected_isa;

/* Maps one -mconfig= name to the ISA bitmask it selects.  */
struct mep_config {
  const char *config_name;
  unsigned int isa;
};

/* Table of known configurations, terminated by a null entry.  */
static struct mep_config mep_configs[] = {
#ifdef COPROC_SELECTION_TABLE
  COPROC_SELECTION_TABLE,
#endif
  { 0, 0 }
};
/* Initialize the global intrinsics variables above.  */

static void
mep_init_intrinsics (void)
{
  size_t i;

  /* Set MEP_SELECTED_ISA to the ISA flag for this configuration.  */
  mep_selected_isa = mep_configs[0].isa;
  if (mep_config_string != 0)
    for (i = 0; mep_configs[i].config_name; i++)
      if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
	{
	  mep_selected_isa = mep_configs[i].isa;
	  break;
	}

  /* Assume all intrinsics are unavailable.  */
  for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
    mep_intrinsic_insn[i] = -1;

  /* Build up the global intrinsic tables.  Later entries for the same
     intrinsic chain to earlier ones via mep_intrinsic_chain.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].isas & mep_selected_isa) != 0)
      {
	mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
	mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
      }

  /* See whether we can directly move values between one coprocessor
     register and another.  */
  for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
    if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
      mep_have_copro_copro_moves_p = true;

  /* See whether we can directly move values between core and
     coprocessor registers.  */
  mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
				 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));

  /* NOTE(review): this assignment unconditionally overrides the
     computation just above, making it dead code — confirm whether the
     forced value is still intended.  */
  mep_have_core_copro_moves_p = 1;
}
/* Declare all available intrinsic functions.  Called once only.  */

/* Tree type nodes for the coprocessor builtin argument/return types;
   created by mep_init_builtins.  */
static tree cp_data_bus_int_type_node;
static tree opaque_vector_type_node;
static tree v8qi_type_node;
static tree v4hi_type_node;
static tree v2si_type_node;
static tree v8uqi_type_node;
static tree v4uhi_type_node;
static tree v2usi_type_node;
5889 static tree
5890 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
5892 switch (cr)
5894 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
5895 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
5896 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
5897 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
5898 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
5899 case cgen_regnum_operand_type_CHAR: return char_type_node;
5900 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
5901 case cgen_regnum_operand_type_SI: return intSI_type_node;
5902 case cgen_regnum_operand_type_DI: return intDI_type_node;
5903 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
5904 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
5905 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
5906 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
5907 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
5908 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
5909 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
5910 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
5911 default:
5912 return void_type_node;
/* Implement TARGET_INIT_BUILTINS: create the coprocessor type nodes
   and declare one builtin function per available UNSPEC-form
   intrinsic.  */
static void
mep_init_builtins (void)
{
  size_t i;

  /* The width of the coprocessor data bus depends on the target.  */
  if (TARGET_64BIT_CR_REGS)
    cp_data_bus_int_type_node = long_long_integer_type_node;
  else
    cp_data_bus_int_type_node = long_integer_type_node;

  opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
  v8qi_type_node = build_vector_type (intQI_type_node, 8);
  v4hi_type_node = build_vector_type (intHI_type_node, 4);
  v2si_type_node = build_vector_type (intSI_type_node, 2);
  v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
  v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
  v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);

  add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node);

  add_builtin_type ("cp_vector", opaque_vector_type_node);

  add_builtin_type ("cp_v8qi", v8qi_type_node);
  add_builtin_type ("cp_v4hi", v4hi_type_node);
  add_builtin_type ("cp_v2si", v2si_type_node);

  add_builtin_type ("cp_v8uqi", v8uqi_type_node);
  add_builtin_type ("cp_v4uhi", v4uhi_type_node);
  add_builtin_type ("cp_v2usi", v2usi_type_node);

  /* Intrinsics like mep_cadd3 are implemented with two groups of
     instructions, one which uses UNSPECs and one which uses a specific
     rtl code such as PLUS.  Instructions in the latter group belong
     to GROUP_KNOWN_CODE.

     In such cases, the intrinsic will have two entries in the global
     tables above.  The unspec form is accessed using builtin functions
     while the specific form is accessed using the mep_* enum in
     mep-intrin.h.

     The idea is that __cop arithmetic and builtin functions have
     different optimization requirements.  If mep_cadd3() appears in
     the source code, the user will surely expect gcc to use cadd3
     rather than a work-alike such as add3.  However, if the user
     just writes "a + b", where a or b are __cop variables, it is
     reasonable for gcc to choose a core instruction rather than
     cadd3 if it believes that is more optimal.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
	&& mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
      {
	tree ret_type = void_type_node;
	tree bi_type;

	/* Declare each intrinsic only once, even when several insns
	   implement it.  */
	if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
	  continue;

	if (cgen_insns[i].cret_p)
	  ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);

	bi_type = build_function_type_list (ret_type, NULL_TREE);
	add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
			      bi_type,
			      cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
      }
}
5983 /* Report the unavailability of the given intrinsic.  Each intrinsic is
   reported at most once per compilation; repeats are suppressed via a
   static seen-table so the user is not flooded with duplicates.  */
5985 #if 1
5986 static void
5987 mep_intrinsic_unavailable (int intrinsic)
/* One flag per entry in cgen_intrinsics[]; zero-initialized statics.  */
5989 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
5991 if (already_reported_p[intrinsic])
5992 return;
/* No implementation at all vs. wrong VLIW/non-VLIW context.  */
5994 if (mep_intrinsic_insn[intrinsic] < 0)
5995 error ("coprocessor intrinsic %qs is not available in this configuration",
5996 cgen_intrinsics[intrinsic]);
5997 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
5998 error ("%qs is not available in VLIW functions",
5999 cgen_intrinsics[intrinsic]);
6000 else
6001 error ("%qs is not available in non-VLIW functions",
6002 cgen_intrinsics[intrinsic]);
6004 already_reported_p[intrinsic] = 1;
6006 #endif
6009 /* See if any implementation of INTRINSIC is available to the
6010 current function. If so, store the most general implementation
6011 in *INSN_PTR and return true. Return false otherwise. */
6013 static bool
6014 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6016 int i;
6018 i = mep_intrinsic_insn[intrinsic];
6019 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6020 i = mep_intrinsic_chain[i];
6022 if (i >= 0)
6024 *insn_ptr = &cgen_insns[i];
6025 return true;
6027 return false;
6031 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6032 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6033 try using a work-alike instead. In this case, the returned insn
6034 may have three operands rather than two. */
6036 static bool
6037 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6039 size_t i;
6041 if (intrinsic == mep_cmov)
/* mep_cmov_insns lists cmov work-alikes in order of preference;
   take the first one that is available in the current context.  */
6043 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6044 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6045 return true;
6046 return false;
/* Not a cmov: defer to the generic lookup.  */
6048 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6052 /* If ARG is a register operand that is the same size as MODE, convert it
6053 to MODE using a subreg. Otherwise return ARG as-is. */
6055 static rtx
6056 mep_convert_arg (machine_mode mode, rtx arg)
/* Only pun same-size register values; anything else (wrong size,
   memory, constants) is returned unchanged for the caller to handle.  */
6058 if (GET_MODE (arg) != mode
6059 && register_operand (arg, VOIDmode)
6060 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6061 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6062 return arg;
6066 /* Apply regnum conversions to ARG using the description given by REGNUM.
6067 Return the new argument on success and null on failure.
   When REGNUM->count is nonzero, ARG must be a compile-time constant
   in [0, count) naming a register; it is mapped to the hard register
   REGNUM->base + ARG.  */
6069 static rtx
6070 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
/* count == 0 means this operand is not a register number at all.  */
6072 if (regnum->count == 0)
6073 return arg;
6075 if (GET_CODE (arg) != CONST_INT
6076 || INTVAL (arg) < 0
6077 || INTVAL (arg) >= regnum->count)
6078 return 0;
6080 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6084 /* Try to make intrinsic argument ARG match the given operand.
6085 UNSIGNED_P is true if the argument has an unsigned type.
   Returns the legitimized rtx, or null when ARG cannot be made to
   satisfy OPERAND's mode and predicate.  */
6087 static rtx
6088 mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
6089 int unsigned_p)
6091 if (GET_CODE (arg) == CONST_INT)
6093 /* CONST_INTs can only be bound to integer operands. */
6094 if (GET_MODE_CLASS (operand->mode) != MODE_INT)
6095 return 0;
6097 else if (GET_CODE (arg) == CONST_DOUBLE)
6098 /* These hold vector constants. */;
6099 else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
6101 /* If the argument is a different size from what's expected, we must
6102 have a value in the right mode class in order to convert it. */
6103 if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
6104 return 0;
6106 /* If the operand is an rvalue, promote or demote it to match the
6107 operand's size. This might not need extra instructions when
6108 ARG is a register value. */
6109 if (operand->constraint[0] != '=')
6110 arg = convert_to_mode (operand->mode, arg, unsigned_p);
6113 /* If the operand is an lvalue, bind the operand to a new register.
6114 The caller will copy this value into ARG after the main
6115 instruction. By doing this always, we produce slightly more
6116 optimal code. */
6117 /* But not for control registers. */
6118 if (operand->constraint[0] == '='
6119 && (! REG_P (arg)
6120 || ! (CONTROL_REGNO_P (REGNO (arg))
6121 || CCR_REGNO_P (REGNO (arg))
6122 || CR_REGNO_P (REGNO (arg)))
6124 return gen_reg_rtx (operand->mode);
6126 /* Try simple mode punning. */
6127 arg = mep_convert_arg (operand->mode, arg);
6128 if (operand->predicate (arg, operand->mode))
6129 return arg;
6131 /* See if forcing the argument into a register will make it match. */
6132 if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
6133 arg = force_reg (operand->mode, arg);
6134 else
6135 arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
6136 if (operand->predicate (arg, operand->mode))
6137 return arg;
/* Nothing worked; caller reports the incompatibility.  */
6139 return 0;
6143 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6144 function FNNAME. OPERAND describes the operand to which ARGNUM
6145 is mapped.
   For constant arguments bound to a known immediate predicate we can
   give a precise range/alignment diagnostic; otherwise fall back to a
   generic incompatible-type error.  */
6147 static void
6148 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6149 int argnum, tree fnname)
6151 size_t i;
6153 if (GET_CODE (arg) == CONST_INT)
6154 for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6155 if (operand->predicate == cgen_immediate_predicates[i].predicate)
6157 const struct cgen_immediate_predicate *predicate;
6158 HOST_WIDE_INT argval;
6160 predicate = &cgen_immediate_predicates[i];
6161 argval = INTVAL (arg);
/* Out of range, or in range but misaligned.  */
6162 if (argval < predicate->lower || argval >= predicate->upper)
6163 error ("argument %d of %qE must be in the range %d...%d",
6164 argnum, fnname, predicate->lower, predicate->upper - 1);
6165 else
6166 error ("argument %d of %qE must be a multiple of %d",
6167 argnum, fnname, predicate->align);
6168 return;
6171 error ("incompatible type for argument %d of %qE", argnum, fnname);
/* Expand a call to a MeP intrinsic builtin (TARGET_EXPAND_BUILTIN).
   EXP is the CALL_EXPR; returns the result rtx or NULL_RTX on error.
   NOTE(review): TARGET is tagged ATTRIBUTE_UNUSED but is in fact used
   below (result reuse and final copy-out).  */
6174 static rtx
6175 mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6176 rtx subtarget ATTRIBUTE_UNUSED,
6177 machine_mode mode ATTRIBUTE_UNUSED,
6178 int ignore ATTRIBUTE_UNUSED)
6180 rtx pat, op[10], arg[10];
6181 unsigned int a;
6182 int opindex, unsigned_p[10];
6183 tree fndecl, args;
6184 unsigned int n_args;
6185 tree fnname;
6186 const struct cgen_insn *cgen_insn;
6187 const struct insn_data_d *idata;
6188 unsigned int first_arg = 0;
6189 unsigned int builtin_n_args;
6191 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6192 fnname = DECL_NAME (fndecl);
6194 /* Find out which instruction we should emit. Note that some coprocessor
6195 intrinsics may only be available in VLIW mode, or only in normal mode. */
6196 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6198 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
6199 return NULL_RTX;
6201 idata = &insn_data[cgen_insn->icode];
6203 builtin_n_args = cgen_insn->num_args;
/* cret_p: operand 0 is the return value, so source arguments start at
   index 1 and the builtin takes one fewer explicit argument.  */
6205 if (cgen_insn->cret_p)
6207 if (cgen_insn->cret_p > 1)
6208 builtin_n_args ++;
6209 first_arg = 1;
6210 mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
6211 builtin_n_args --;
6214 /* Evaluate each argument. */
6215 n_args = call_expr_nargs (exp);
6217 if (n_args < builtin_n_args)
6219 error ("too few arguments to %qE", fnname);
6220 return NULL_RTX;
6222 if (n_args > builtin_n_args)
6224 error ("too many arguments to %qE", fnname);
6225 return NULL_RTX;
6228 for (a = first_arg; a < builtin_n_args + first_arg; a++)
6230 tree value;
6232 args = CALL_EXPR_ARG (exp, a - first_arg);
6234 value = args;
6236 #if 0
6237 if (cgen_insn->regnums[a].reference_p)
6239 if (TREE_CODE (value) != ADDR_EXPR)
6241 debug_tree(value);
6242 error ("argument %d of %qE must be an address", a+1, fnname);
6243 return NULL_RTX;
6245 value = TREE_OPERAND (value, 0);
6247 #endif
6249 /* If the argument has been promoted to int, get the unpromoted
6250 value. This is necessary when sub-int memory values are bound
6251 to reference parameters. */
6252 if (TREE_CODE (value) == NOP_EXPR
6253 && TREE_TYPE (value) == integer_type_node
6254 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6255 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6256 < TYPE_PRECISION (TREE_TYPE (value))))
6257 value = TREE_OPERAND (value, 0);
6259 /* If the argument has been promoted to double, get the unpromoted
6260 SFmode value. This is necessary for FMAX support, for example. */
6261 if (TREE_CODE (value) == NOP_EXPR
6262 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6263 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6264 && TYPE_MODE (TREE_TYPE (value)) == DFmode
6265 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6266 value = TREE_OPERAND (value, 0);
6268 unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6269 arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
6270 arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6271 if (cgen_insn->regnums[a].reference_p)
6273 tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6274 machine_mode pointed_mode = TYPE_MODE (pointed_to);
6276 arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
/* mep_convert_regnum returns null for an out-of-range register
   number; diagnose it with the valid range.  */
6278 if (arg[a] == 0)
6280 error ("argument %d of %qE must be in the range %d...%d",
6281 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
6282 return NULL_RTX;
/* Bind the return-value operand(s): reuse TARGET when its mode
   matches, otherwise use a fresh pseudo.  */
6286 for (a = 0; a < first_arg; a++)
6288 if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6289 arg[a] = target;
6290 else
6291 arg[a] = gen_reg_rtx (idata->operand[0].mode);
6294 /* Convert the arguments into a form suitable for the intrinsic.
6295 Report an error if this isn't possible. */
6296 for (opindex = 0; opindex < idata->n_operands; opindex++)
6298 a = cgen_insn->op_mapping[opindex];
6299 op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6300 arg[a], unsigned_p[a]);
6301 if (op[opindex] == 0)
6303 mep_incompatible_arg (&idata->operand[opindex],
6304 arg[a], a + 1 - first_arg, fnname);
6305 return NULL_RTX;
6309 /* Emit the instruction. */
6310 pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6311 op[5], op[6], op[7], op[8], op[9]);
/* Conditional-branch patterns must be emitted as jump insns.  */
6313 if (GET_CODE (pat) == SET
6314 && GET_CODE (SET_DEST (pat)) == PC
6315 && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6316 emit_jump_insn (pat);
6317 else
6318 emit_insn (pat);
6320 /* Copy lvalues back to their final locations. */
6321 for (opindex = 0; opindex < idata->n_operands; opindex++)
6322 if (idata->operand[opindex].constraint[0] == '=')
6324 a = cgen_insn->op_mapping[opindex];
6325 if (a >= first_arg)
6327 if (GET_MODE_CLASS (GET_MODE (arg[a]))
6328 != GET_MODE_CLASS (GET_MODE (op[opindex])))
6329 emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6330 op[opindex]));
6331 else
6333 /* First convert the operand to the right mode, then copy it
6334 into the destination. Doing the conversion as a separate
6335 step (rather than using convert_move) means that we can
6336 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6337 refer to the same register. */
6338 op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6339 op[opindex], unsigned_p[a]);
6340 if (!rtx_equal_p (arg[a], op[opindex]))
6341 emit_move_insn (arg[a], op[opindex]);
6346 if (first_arg > 0 && target && target != op[0])
6348 emit_move_insn (target, op[0]);
6351 return target;
/* TARGET_VECTOR_MODE_SUPPORTED_P: MeP exposes its vector types only
   through coprocessor intrinsics, never as generic vector modes.  */
6354 static bool
6355 mep_vector_mode_supported_p (machine_mode mode ATTRIBUTE_UNUSED)
6357 return false;
6360 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6361 a global register.
   X is a single sub-rtx from the containing walk; only SUBREG, REG
   and CALL can name a global register here.  */
6363 static bool
6364 global_reg_mentioned_p_1 (const_rtx x)
6366 int regno;
6368 switch (GET_CODE (x))
6370 case SUBREG:
6371 if (REG_P (SUBREG_REG (x)))
/* Only hard registers can be global; subreg_regno resolves the
   actual hard register covered by the subreg.  */
6373 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6374 && global_regs[subreg_regno (x)])
6375 return true;
6376 return false;
6378 break;
6380 case REG:
6381 regno = REGNO (x);
6382 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6383 return true;
6384 return false;
6386 case CALL:
6387 /* A non-constant call might use a global register. */
6388 return true;
6390 default:
6391 break;
6394 return false;
6397 /* Returns nonzero if X mentions a global register.
   X may be a full insn; for insns we scan the pattern, and for calls
   the FUNCTION_USAGE list (non-const/pure calls conservatively count
   as mentioning a global register).  */
6399 static bool
6400 global_reg_mentioned_p (rtx x)
6402 if (INSN_P (x))
6404 if (CALL_P (x))
6406 if (! RTL_CONST_OR_PURE_CALL_P (x))
6407 return true;
6408 x = CALL_INSN_FUNCTION_USAGE (x);
6409 if (x == 0)
6410 return false;
6412 else
6413 x = PATTERN (x);
/* Walk every sub-rtx of the (possibly rewritten) X.  */
6416 subrtx_iterator::array_type array;
6417 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
6418 if (global_reg_mentioned_p_1 (*iter))
6419 return true;
6420 return false;
6422 /* Scheduling hooks for VLIW mode.
6424 Conceptually this is very simple: we have a two-pack architecture
6425 that takes one core insn and one coprocessor insn to make up either
6426 a 32- or 64-bit instruction word (depending on the option bit set in
6427 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6428 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6429 and one 48-bit cop insn or two 32-bit core/cop insns.
6431 In practice, instruction selection will be a bear. Consider in
6432 VL64 mode the following insns
6434 add $1, 1
6435 cmov $cr0, $0
6437 these cannot pack, since the add is a 16-bit core insn and cmov
6438 is a 32-bit cop insn. However,
6440 add3 $1, $1, 1
6441 cmov $cr0, $0
6443 packs just fine. For good VLIW code generation in VL64 mode, we
6444 will have to have 32-bit alternatives for many of the common core
6445 insns. Not implemented. */
/* TARGET_SCHED_ADJUST_COST: tweak the scheduling cost of the
   dependence LINK between DEP_INSN and INSN; COST is the default.  */
6447 static int
6448 mep_adjust_cost (rtx_insn *insn, rtx link, rtx_insn *dep_insn, int cost)
6450 int cost_specified;
6452 if (REG_NOTE_KIND (link) != 0)
6454 /* See whether INSN and DEP_INSN are intrinsics that set the same
6455 hard register. If so, it is more important to free up DEP_INSN
6456 than it is to free up INSN.
6458 Note that intrinsics like mep_mulr are handled differently from
6459 the equivalent mep.md patterns. In mep.md, if we don't care
6460 about the value of $lo and $hi, the pattern will just clobber
6461 the registers, not set them. Since clobbers don't count as
6462 output dependencies, it is often possible to reorder two mulrs,
6463 even after reload.
6465 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6466 so any pair of mep_mulr()s will be inter-dependent. We should
6467 therefore give the first mep_mulr() a higher priority. */
6468 if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
6469 && global_reg_mentioned_p (PATTERN (insn))
6470 && global_reg_mentioned_p (PATTERN (dep_insn)))
6471 return 1;
6473 /* If the dependence is an anti or output dependence, assume it
6474 has no cost. */
6475 return 0;
6478 /* If we can't recognize the insns, we can't really do anything. */
6479 if (recog_memoized (dep_insn) < 0)
6480 return cost;
6482 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6483 attribute instead. */
6484 if (!TARGET_H1)
6486 cost_specified = get_attr_latency (dep_insn);
6487 if (cost_specified != 0)
6488 return cost_specified;
6491 return cost;
6494 /* ??? We don't properly compute the length of a load/store insn,
6495 taking into account the addressing mode. */
/* TARGET_SCHED_ISSUE_RATE: IVC2 has three issue slots, other cores
   (core + coprocessor pairing) have two.  */
6497 static int
6498 mep_issue_rate (void)
6500 return TARGET_IVC2 ? 3 : 2;
6503 /* Return true if function DECL was declared with the vliw attribute.
   The attribute lives on the function TYPE, not the decl itself.  */
6505 bool
6506 mep_vliw_function_p (tree decl)
6508 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
/* Search the scheduler's READY list (NREADY entries) for an insn that
   issues to SLOT and has encoded length LENGTH bytes.  The list is
   scanned from lowest to highest priority end; return the first match
   or NULL.  */
6511 static rtx_insn *
6512 mep_find_ready_insn (rtx_insn **ready, int nready, enum attr_slot slot,
6513 int length)
6515 int i;
/* ready[] is ordered with the highest-priority insn last.  */
6517 for (i = nready - 1; i >= 0; --i)
6519 rtx_insn *insn = ready[i];
6520 if (recog_memoized (insn) >= 0
6521 && get_attr_slot (insn) == slot
6522 && get_attr_length (insn) == length)
6523 return insn;
6526 return NULL;
/* Move INSN, which must be somewhere in READY[0..NREADY-1], to the end
   of the ready list (the highest-priority position) so the scheduler
   issues it next.  Aborts if INSN is not present.  */
6529 static void
6530 mep_move_ready_insn (rtx_insn **ready, int nready, rtx_insn *insn)
6532 int i;
6534 for (i = 0; i < nready; ++i)
6535 if (ready[i] == insn)
/* Shift the tail down one slot and place INSN last.  */
6537 for (; i < nready - 1; ++i)
6538 ready[i] = ready[i + 1];
6539 ready[i] = insn;
6540 return;
6543 gcc_unreachable ();
/* Debug helper: print one line to DUMP describing INSN's code, uid,
   pattern name and which VLIW slot(s) it can occupy.  */
6546 static void
6547 mep_print_sched_insn (FILE *dump, rtx_insn *insn)
6549 const char *slots = "none";
6550 const char *name = NULL;
6551 int code;
6552 char buf[30];
6554 if (GET_CODE (PATTERN (insn)) == SET
6555 || GET_CODE (PATTERN (insn)) == PARALLEL)
6557 switch (get_attr_slots (insn))
6559 case SLOTS_CORE: slots = "core"; break;
6560 case SLOTS_C3: slots = "c3"; break;
6561 case SLOTS_P0: slots = "p0"; break;
6562 case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6563 case SLOTS_P0_P1: slots = "p0,p1"; break;
6564 case SLOTS_P0S: slots = "p0s"; break;
6565 case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6566 case SLOTS_P1: slots = "p1"; break;
6567 default:
/* Unknown combination: print the raw attribute value.  */
6568 sprintf(buf, "%d", get_attr_slots (insn));
6569 slots = buf;
6570 break;
6573 if (GET_CODE (PATTERN (insn)) == USE)
6574 slots = "use";
6576 code = INSN_CODE (insn);
6577 if (code >= 0)
6578 name = get_insn_name (code);
6579 if (!name)
6580 name = "{unknown}";
6582 fprintf (dump,
6583 "insn %4d %4d %8s %s\n",
6584 code,
6585 INSN_UID (insn),
6586 name,
6587 slots);
/* TARGET_SCHED_REORDER: in VLIW functions, try to move a packable
   core/coprocessor insn pair to the head of the ready list and allow
   dual issue.  Returns the number of insns that may be issued this
   cycle.  NOTE(review): DUMP and SCHED_VERBOSE are tagged
   ATTRIBUTE_UNUSED but are used below.  */
6590 static int
6591 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6592 int sched_verbose ATTRIBUTE_UNUSED, rtx_insn **ready,
6593 int *pnready, int clock ATTRIBUTE_UNUSED)
6595 int nready = *pnready;
6596 rtx_insn *core_insn, *cop_insn;
6597 int i;
6599 if (dump && sched_verbose > 1)
6601 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6602 for (i=0; i<nready; i++)
6603 mep_print_sched_insn (dump, ready[i]);
6604 fprintf (dump, "\n");
6607 if (!mep_vliw_function_p (cfun->decl))
6608 return 1;
6609 if (nready < 2)
6610 return 1;
6612 /* IVC2 uses a DFA to determine what's ready and what's not. */
6613 if (TARGET_IVC2)
6614 return nready;
6616 /* We can issue either a core or coprocessor instruction.
6617 Look for a matched pair of insns to reorder. If we don't
6618 find any, don't second-guess the scheduler's priorities. */
6620 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6621 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6622 TARGET_OPT_VL64 ? 6 : 2)))
6624 else if (TARGET_OPT_VL64
6625 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6626 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6628 else
6629 /* We didn't find a pair. Issue the single insn at the head
6630 of the ready list. */
6631 return 1;
6633 /* Reorder the two insns first. */
6634 mep_move_ready_insn (ready, nready, core_insn);
6635 mep_move_ready_insn (ready, nready - 1, cop_insn);
6636 return 2;
6639 /* Return true if X contains a register that is set by insn PREV. */
6641 static bool
6642 mep_store_find_set (const_rtx x, const rtx_insn *prev)
6644 subrtx_iterator::array_type array;
6645 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
6646 if (REG_P (x) && reg_set_p (x, prev))
6647 return true;
6648 return false;
6651 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6652 not the containing insn.
   Returns true when PAT is a store whose *address* does not depend on
   any register set by PREV (the stored data may still depend on it).  */
6654 static bool
6655 mep_store_data_bypass_1 (rtx_insn *prev, rtx pat)
6657 /* Cope with intrinsics like swcpa. */
6658 if (GET_CODE (pat) == PARALLEL)
6660 int i;
/* NOTE(review): the PARALLEL elements are sub-patterns, yet they are
   cast to rtx_insn * for the recursive mep_store_data_bypass_p call —
   looks dubious; confirm against as_a<> checking rules.  */
6662 for (i = 0; i < XVECLEN (pat, 0); i++)
6663 if (mep_store_data_bypass_p (prev,
6664 as_a <rtx_insn *> (XVECEXP (pat, 0, i))))
6665 return true;
6667 return false;
6670 /* Check for some sort of store. */
6671 if (GET_CODE (pat) != SET
6672 || GET_CODE (SET_DEST (pat)) != MEM)
6673 return false;
6675 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6676 The first operand to the unspec is the store data and the other operands
6677 are used to calculate the address. */
6678 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6680 rtx src;
6681 int i;
6683 src = SET_SRC (pat);
/* Start at 1: operand 0 is the data, which is allowed to depend
   on PREV; only the address operands matter.  */
6684 for (i = 1; i < XVECLEN (src, 0); i++)
6685 if (mep_store_find_set (XVECEXP (src, 0, i), prev))
6686 return false;
6688 return true;
6691 /* Otherwise just check that PREV doesn't modify any register mentioned
6692 in the memory destination. */
6693 return !mep_store_find_set (SET_DEST (pat), prev);
6696 /* Return true if INSN is a store instruction and if the store address
6697 has no true dependence on PREV.
   Used by bypass conditions in mep.md; non-insns never qualify.  */
6699 bool
6700 mep_store_data_bypass_p (rtx_insn *prev, rtx_insn *insn)
6702 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6705 /* Return true if, apart from HI/LO, there are no true dependencies
6706 between multiplication instructions PREV and INSN.
   Used by mep.md bypasses: back-to-back multiplies that only share
   $hi/$lo can be pipelined.  */
6708 bool
6709 mep_mul_hilo_bypass_p (rtx_insn *prev, rtx_insn *insn)
6711 rtx pat;
6713 pat = PATTERN (insn);
/* For a PARALLEL (e.g. a multiply with clobbers) examine the first
   element, which holds the main SET.  */
6714 if (GET_CODE (pat) == PARALLEL)
6715 pat = XVECEXP (pat, 0, 0);
6716 if (GET_CODE (pat) != SET)
6717 return false;
6718 subrtx_iterator::array_type array;
6719 FOR_EACH_SUBRTX (iter, array, SET_SRC (pat), NONCONST)
6721 const_rtx x = *iter;
6722 if (REG_P (x)
6723 && REGNO (x) != LO_REGNO
6724 && REGNO (x) != HI_REGNO
6725 && reg_set_p (x, prev))
6726 return false;
6728 return true;
6731 /* Return true if INSN is an ldc instruction that issues to the
6732 MeP-h1 integer pipeline. This is true for instructions that
6733 read from PSW, LP, SAR, HI and LO. */
6735 bool
6736 mep_ipipe_ldc_p (rtx_insn *insn)
6738 rtx pat, src;
6740 pat = PATTERN (insn);
6742 /* Cope with intrinsics that set both a hard register and its shadow.
6743 The set of the hard register comes first. */
6744 if (GET_CODE (pat) == PARALLEL)
6745 pat = XVECEXP (pat, 0, 0);
6747 if (GET_CODE (pat) == SET)
6749 src = SET_SRC (pat);
6751 /* Cope with intrinsics. The first operand to the unspec is
6752 the source register. */
6753 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6754 src = XVECEXP (src, 0, 0);
6756 if (REG_P (src))
6757 switch (REGNO (src))
6759 case PSW_REGNO:
6760 case LP_REGNO:
6761 case SAR_REGNO:
6762 case HI_REGNO:
6763 case LO_REGNO:
6764 return true;
/* Any other source register, or a non-SET pattern: not an
   integer-pipeline ldc.  */
6767 return false;
6770 /* Create a VLIW bundle from core instruction CORE and coprocessor
6771 instruction COP. COP always satisfies INSN_P, but CORE can be
6772 either a new pattern or an existing instruction.
6774 Emit the bundle in place of COP and return it. */
6776 static rtx_insn *
6777 mep_make_bundle (rtx core_insn_or_pat, rtx_insn *cop)
6779 rtx seq;
6780 rtx_insn *core_insn;
6781 rtx_insn *insn;
6783 /* If CORE is an existing instruction, remove it, otherwise put
6784 the new pattern in an INSN harness. */
6785 if (INSN_P (core_insn_or_pat))
6787 core_insn = as_a <rtx_insn *> (core_insn_or_pat);
6788 remove_insn (core_insn);
6790 else
6791 core_insn = make_insn_raw (core_insn_or_pat);
6793 /* Generate the bundle sequence and replace COP with it. */
6794 seq = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core_insn, cop));
6795 insn = emit_insn_after (seq, cop);
6796 remove_insn (cop);
6798 /* Set up the links of the insns inside the SEQUENCE. */
6799 SET_PREV_INSN (core_insn) = PREV_INSN (insn);
6800 SET_NEXT_INSN (core_insn) = cop;
6801 SET_PREV_INSN (cop) = core_insn;
6802 SET_NEXT_INSN (cop) = NEXT_INSN (insn);
6804 /* Set the VLIW flag for the coprocessor instruction. */
/* BImode on an insn is (ab)used as the "bundled with predecessor"
   marker; see mep_bundle_insns.  */
6805 PUT_MODE (core_insn, VOIDmode);
6806 PUT_MODE (cop, BImode);
6808 /* Derive a location for the bundle. Individual instructions cannot
6809 have their own location because there can be no assembler labels
6810 between CORE_INSN and COP. */
6811 INSN_LOCATION (insn) = INSN_LOCATION (INSN_LOCATION (core_insn) ? core_insn : cop);
6812 INSN_LOCATION (core_insn) = 0;
6813 INSN_LOCATION (cop) = 0;
6815 return insn;
6818 /* A helper routine for mep_insn_dependent_p called through note_stores.
   DATA points at the candidate insn pattern; clear it to NULL when the
   stored-to location X is mentioned there, signalling a dependence.  */
6820 static void
6821 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6823 rtx * pinsn = (rtx *) data;
6825 if (*pinsn && reg_mentioned_p (x, *pinsn))
6826 *pinsn = NULL_RTX;
6829 /* Return true if anything in insn X is (anti,output,true) dependent on
6830 anything in insn Y.
   Checks both directions via note_stores: stores in X mentioned by Y's
   pattern, and stores in Y mentioned by X's pattern.  */
6832 static int
6833 mep_insn_dependent_p (rtx x, rtx y)
6835 rtx tmp;
6837 gcc_assert (INSN_P (x));
6838 gcc_assert (INSN_P (y));
/* TMP is nulled by the callback when a dependence is found.  */
6840 tmp = PATTERN (y);
6841 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6842 if (tmp == NULL_RTX)
6843 return 1;
6845 tmp = PATTERN (x);
6846 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6847 if (tmp == NULL_RTX)
6848 return 1;
6850 return 0;
/* Return nonzero if INSN issues to the core slot (as opposed to a
   coprocessor slot).  USE patterns never count.  */
6853 static int
6854 core_insn_p (rtx_insn *insn)
6856 if (GET_CODE (PATTERN (insn)) == USE)
6857 return 0;
6858 if (get_attr_slot (insn) == SLOT_CORE)
6859 return 1;
6860 return 0;
6863 /* Mark coprocessor instructions that can be bundled together with
6864 the immediately preceding core instruction. This is later used
6865 to emit the "+" that tells the assembler to create a VLIW insn.
6867 For unbundled insns, the assembler will automatically add coprocessor
6868 nops, and 16-bit core nops. Due to an apparent oversight in the
6869 spec, the assembler will _not_ automatically add 32-bit core nops,
6870 so we have to emit those here.
6872 Called from mep_insn_reorg. */
6874 static void
6875 mep_bundle_insns (rtx_insn *insns)
6877 rtx_insn *insn, *last = NULL, *first = NULL;
6878 int saw_scheduling = 0;
6880 /* Only do bundling if we're in vliw mode. */
6881 if (!mep_vliw_function_p (cfun->decl))
6882 return;
6884 /* The first insn in a bundle are TImode, the remainder are
6885 VOIDmode. After this function, the first has VOIDmode and the
6886 rest have BImode. */
6888 /* Note: this doesn't appear to be true for JUMP_INSNs. */
6890 /* First, move any NOTEs that are within a bundle, to the beginning
6891 of the bundle. */
6892 for (insn = insns; insn ; insn = NEXT_INSN (insn))
6894 if (NOTE_P (insn) && first)
6895 /* Don't clear FIRST. */;
6897 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
6898 first = insn;
6900 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
6902 rtx_insn *note, *prev;
6904 /* INSN is part of a bundle; FIRST is the first insn in that
6905 bundle. Move all intervening notes out of the bundle.
6906 In addition, since the debug pass may insert a label
6907 whenever the current line changes, set the location info
6908 for INSN to match FIRST. */
6910 INSN_LOCATION (insn) = INSN_LOCATION (first);
6912 note = PREV_INSN (insn);
6913 while (note && note != first)
6915 prev = PREV_INSN (note);
6917 if (NOTE_P (note))
6919 /* Remove NOTE from here... */
6920 SET_PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
6921 SET_NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
6922 /* ...and put it in here. */
6923 SET_NEXT_INSN (note) = first;
6924 SET_PREV_INSN (note) = PREV_INSN (first);
6925 SET_NEXT_INSN (PREV_INSN (note)) = note;
6926 SET_PREV_INSN (NEXT_INSN (note)) = note;
6929 note = prev;
6933 else if (!NONJUMP_INSN_P (insn))
6934 first = 0;
6937 /* Now fix up the bundles. */
6938 for (insn = insns; insn ; insn = NEXT_INSN (insn))
6940 if (NOTE_P (insn))
6941 continue;
6943 if (!NONJUMP_INSN_P (insn))
6945 last = 0;
6946 continue;
6949 /* If we're not optimizing enough, there won't be scheduling
6950 info. We detect that here. */
6951 if (GET_MODE (insn) == TImode)
6952 saw_scheduling = 1;
6953 if (!saw_scheduling)
6954 continue;
6956 if (TARGET_IVC2)
6958 rtx_insn *core_insn = NULL;
6960 /* IVC2 slots are scheduled by DFA, so we just accept
6961 whatever the scheduler gives us. However, we must make
6962 sure the core insn (if any) is the first in the bundle.
6963 The IVC2 assembler can insert whatever NOPs are needed,
6964 and allows a COP insn to be first. */
6966 if (NONJUMP_INSN_P (insn)
6967 && GET_CODE (PATTERN (insn)) != USE
6968 && GET_MODE (insn) == TImode)
/* Scan to the last insn of this bundle, remembering any core
   insn encountered along the way.  */
6970 for (last = insn;
6971 NEXT_INSN (last)
6972 && GET_MODE (NEXT_INSN (last)) == VOIDmode
6973 && NONJUMP_INSN_P (NEXT_INSN (last));
6974 last = NEXT_INSN (last))
6976 if (core_insn_p (last))
6977 core_insn = last;
6979 if (core_insn_p (last))
6980 core_insn = last;
6982 if (core_insn && core_insn != insn)
6984 /* Swap core insn to first in the bundle. */
6986 /* Remove core insn. */
6987 if (PREV_INSN (core_insn))
6988 SET_NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
6989 if (NEXT_INSN (core_insn))
6990 SET_PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);
6992 /* Re-insert core insn. */
6993 SET_PREV_INSN (core_insn) = PREV_INSN (insn);
6994 SET_NEXT_INSN (core_insn) = insn;
6996 if (PREV_INSN (core_insn))
6997 SET_NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
6998 SET_PREV_INSN (insn) = core_insn;
7000 PUT_MODE (core_insn, TImode);
7001 PUT_MODE (insn, VOIDmode);
7005 /* The first insn has TImode, the rest have VOIDmode */
7006 if (GET_MODE (insn) == TImode)
7007 PUT_MODE (insn, VOIDmode);
7008 else
7009 PUT_MODE (insn, BImode);
7010 continue;
7013 PUT_MODE (insn, VOIDmode);
7014 if (recog_memoized (insn) >= 0
7015 && get_attr_slot (insn) == SLOT_COP)
/* A COP insn with no packable core partner gets an explicit nop
   bundle partner of the right size (see the head comment).  */
7017 if (JUMP_P (insn)
7018 || ! last
7019 || recog_memoized (last) < 0
7020 || get_attr_slot (last) != SLOT_CORE
7021 || (get_attr_length (insn)
7022 != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
7023 || mep_insn_dependent_p (insn, last))
7025 switch (get_attr_length (insn))
7027 case 8:
7028 break;
7029 case 6:
7030 insn = mep_make_bundle (gen_nop (), insn);
7031 break;
7032 case 4:
7033 if (TARGET_OPT_VL64)
7034 insn = mep_make_bundle (gen_nop32 (), insn);
7035 break;
7036 case 2:
7037 if (TARGET_OPT_VL64)
7038 error ("2 byte cop instructions are"
7039 " not allowed in 64-bit VLIW mode");
7040 else
7041 insn = mep_make_bundle (gen_nop (), insn);
7042 break;
7043 default:
7044 error ("unexpected %d byte cop instruction",
7045 get_attr_length (insn));
7046 break;
7049 else
7050 insn = mep_make_bundle (last, insn);
7053 last = insn;
7058 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7059 Return true on success. This function can fail if the intrinsic
7060 is unavailable or if the operands don't satisfy their predicates. */
7062 bool
7063 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7065 const struct cgen_insn *cgen_insn;
7066 const struct insn_data_d *idata;
7067 rtx newop[10];
7068 int i;
7070 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7071 return false;
/* Pun each operand into the mode the chosen pattern expects and
   check its predicate; bail out on the first mismatch.  */
7073 idata = &insn_data[cgen_insn->icode];
7074 for (i = 0; i < idata->n_operands; i++)
7076 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7077 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7078 return false;
/* NOTE(review): only newop[0..8] are passed here, while
   mep_expand_builtin passes ten operands to genfun — presumably no
   intrinsic reached via this path has ten operands; confirm.  */
7081 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7082 newop[3], newop[4], newop[5],
7083 newop[6], newop[7], newop[8]));
7085 return true;
7089 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7090 OPERANDS[0]. Report an error if the instruction could not
7091 be synthesized. OPERANDS[1] is a register_operand. For sign
7092 and zero extensions, it may be smaller than SImode.
   Currently a stub: always fails, so callers fall back to the
   non-intrinsic expansion path.  */
7094 bool
7095 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
7096 rtx * operands ATTRIBUTE_UNUSED)
7098 return false;
7102 /* Likewise, but apply a binary operation to OPERANDS[1] and
7103 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7104 can be a general_operand.
7106 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7107 third operand. REG and REG3 take register operands only.
   Currently a stub: always fails (see mep_expand_unary_intrinsic).  */
7109 bool
7110 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
7111 int ATTRIBUTE_UNUSED immediate3,
7112 int ATTRIBUTE_UNUSED reg,
7113 int ATTRIBUTE_UNUSED reg3,
7114 rtx * operands ATTRIBUTE_UNUSED)
7116 return false;
/* TARGET_RTX_COSTS: set *TOTAL for the rtx kinds we model explicitly
   and return true; return false to let the generic costs apply.  */
7119 static bool
7120 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
7121 int opno ATTRIBUTE_UNUSED, int *total,
7122 bool ATTRIBUTE_UNUSED speed_t)
7124 switch (code)
7126 case CONST_INT:
/* NOTE(review): "< 127" excludes 127 from the cheapest bucket while
   -128 is included — looks like an off-by-one for a signed 8-bit
   range; confirm against the immediate encodings before changing.  */
7127 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7128 *total = 0;
7129 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7130 *total = 1;
7131 else
7132 *total = 3;
7133 return true;
7135 case SYMBOL_REF:
7136 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7137 return true;
7139 case MULT:
7140 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7141 ? COSTS_N_INSNS (3)
7142 : COSTS_N_INSNS (2));
7143 return true;
7145 return false;
/* TARGET_ADDRESS_COST: all legitimate addresses cost the same.  */
7148 static int
7149 mep_address_cost (rtx addr ATTRIBUTE_UNUSED,
7150 machine_mode mode ATTRIBUTE_UNUSED,
7151 addr_space_t as ATTRIBUTE_UNUSED,
7152 bool ATTRIBUTE_UNUSED speed_p)
7154 return 1;
/* TARGET_ASM_INIT_SECTIONS: create the MeP-specific output sections
   (based/tiny/far data, their bss/rodata variants, and the VLIW and
   far text sections).  */
7157 static void
7158 mep_asm_init_sections (void)
7160 based_section
7161 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7162 "\t.section .based,\"aw\"");
7164 tinybss_section
7165 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7166 "\t.section .sbss,\"aw\"");
7168 sdata_section
7169 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7170 "\t.section .sdata,\"aw\",@progbits");
7172 far_section
7173 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7174 "\t.section .far,\"aw\"");
7176 farbss_section
7177 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7178 "\t.section .farbss,\"aw\"");
7180 frodata_section
7181 = get_unnamed_section (0, output_section_asm_op,
7182 "\t.section .frodata,\"a\"");
7184 srodata_section
7185 = get_unnamed_section (0, output_section_asm_op,
7186 "\t.section .srodata,\"a\"");
/* The "v" flag plus the .vliw directive mark VLIW code sections.  */
7188 vtext_section
7189 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7190 "\t.section .vtext,\"axv\"\n\t.vliw");
7192 vftext_section
7193 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7194 "\t.section .vftext,\"axv\"\n\t.vliw");
7196 ftext_section
7197 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7198 "\t.section .ftext,\"ax\"\n\t.core");
/* Initialize the GCC target structure.  Each pair below overrides one
   target hook with the MeP implementation defined earlier in this
   file; anything not overridden keeps the default from
   TARGET_INITIALIZER.  */

/* Assembly output and attribute handling.  */
#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE 	mep_start_function
#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE		mep_attribute_table
#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES	mep_comp_type_attributes
#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES	mep_insert_attributes
#undef  TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P	mep_function_attribute_inlinable_p
#undef  TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P		mep_can_inline_p

/* Section selection and naming (based/tiny/far data support).  */
#undef  TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS	mep_section_type_flags
#undef  TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION	mep_asm_named_section

/* Intrinsics / builtins.  */
#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS		mep_init_builtins
#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN		mep_expand_builtin

/* Instruction scheduling (VLIW bundling).  */
#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST	mep_adjust_cost
#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE		mep_issue_rate
#undef  TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER		mep_sched_reorder

/* Symbol encoding and section placement.  */
#undef  TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING	mep_strip_name_encoding
#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION	mep_select_section
#undef  TARGET_ASM_UNIQUE_SECTION
#define TARGET_ASM_UNIQUE_SECTION	mep_unique_section
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO	mep_encode_section_info

/* Calls, costs, and machine-dependent reorganization.  */
#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL	mep_function_ok_for_sibcall
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS		mep_rtx_cost
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST 		mep_address_cost
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG  mep_reorg

/* Argument passing and varargs.  */
#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS	mep_setup_incoming_varargs
#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE        mep_pass_by_reference
#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG             mep_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE     mep_function_arg_advance

#undef  TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P	mep_vector_mode_supported_p
#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE		mep_option_override
#undef  TARGET_ALLOCATE_INITIAL_VALUE
#define TARGET_ALLOCATE_INITIAL_VALUE   mep_allocate_initial_value
#undef  TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS 	mep_asm_init_sections
#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY		mep_return_in_memory
#undef  TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield

/* va_list construction and gimplification.  */
#undef	TARGET_EXPAND_BUILTIN_SAVEREGS
#define	TARGET_EXPAND_BUILTIN_SAVEREGS	mep_expand_builtin_saveregs
#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST	mep_build_builtin_va_list
#undef  TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START	mep_expand_va_start
#undef	TARGET_GIMPLIFY_VA_ARG_EXPR
#define	TARGET_GIMPLIFY_VA_ARG_EXPR	mep_gimplify_va_arg_expr

/* Frame elimination, register usage, trampolines, constants, loops.  */
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE            mep_can_eliminate
#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE	mep_conditional_register_usage
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT		mep_trampoline_init
#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P	mep_legitimate_constant_p
#undef  TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P		can_use_doloop_if_innermost

/* The single target vector instance, built from the macros above.  */
struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collector roots generated by gengtype for this file.  */
#include "gt-mep.h"