2014-12-19 Andrew MacLeod <amacleod@redhat.com>
[official-gcc.git] / gcc / config / mep / mep.c
blobb1dbc7cf9cc6d8302e16f268e14ca25b6cca3084
1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "varasm.h"
28 #include "calls.h"
29 #include "stringpool.h"
30 #include "stor-layout.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
35 #include "insn-flags.h"
36 #include "output.h"
37 #include "insn-attr.h"
38 #include "flags.h"
39 #include "recog.h"
40 #include "obstack.h"
41 #include "tree.h"
42 #include "expr.h"
43 #include "except.h"
44 #include "hashtab.h"
45 #include "hash-set.h"
46 #include "vec.h"
47 #include "machmode.h"
48 #include "input.h"
49 #include "function.h"
50 #include "insn-codes.h"
51 #include "optabs.h"
52 #include "reload.h"
53 #include "tm_p.h"
54 #include "ggc.h"
55 #include "diagnostic-core.h"
56 #include "target.h"
57 #include "target-def.h"
58 #include "langhooks.h"
59 #include "dominance.h"
60 #include "cfg.h"
61 #include "cfgrtl.h"
62 #include "cfganal.h"
63 #include "lcm.h"
64 #include "cfgbuild.h"
65 #include "cfgcleanup.h"
66 #include "predict.h"
67 #include "basic-block.h"
68 #include "df.h"
69 #include "hash-table.h"
70 #include "tree-ssa-alias.h"
71 #include "internal-fn.h"
72 #include "gimple-fold.h"
73 #include "tree-eh.h"
74 #include "gimple-expr.h"
75 #include "is-a.h"
76 #include "gimple.h"
77 #include "gimplify.h"
78 #include "opts.h"
79 #include "dumpfile.h"
80 #include "builtins.h"
81 #include "rtl-iter.h"
83 /* Structure of this file:
85 + Command Line Option Support
86 + Pattern support - constraints, predicates, expanders
87 + Reload Support
88 + Costs
89 + Functions to save and restore machine-specific function data.
90 + Frame/Epilog/Prolog Related
91 + Operand Printing
92 + Function args in registers
93 + Handle pipeline hazards
94 + Handle attributes
95 + Trampolines
96 + Machine-dependent Reorg
97 + Builtins. */
99 /* Symbol encodings:
101 Symbols are encoded as @ <char> . <name> where <char> is one of these:
103 b - based
104 t - tiny
105 n - near
106 f - far
107 i - io, near
108 I - io, far
109 c - cb (control bus) */
111 struct GTY(()) machine_function
113 int mep_frame_pointer_needed;
115 /* For varargs. */
116 int arg_regs_to_save;
117 int regsave_filler;
118 int frame_filler;
119 int frame_locked;
121 /* Records __builtin_return address. */
122 rtx eh_stack_adjust;
124 int reg_save_size;
125 int reg_save_slot[FIRST_PSEUDO_REGISTER];
126 unsigned char reg_saved[FIRST_PSEUDO_REGISTER];
128 /* 2 if the current function has an interrupt attribute, 1 if not, 0
129 if unknown. This is here because resource.c uses EPILOGUE_USES
130 which needs it. */
131 int interrupt_handler;
133 /* Likewise, for disinterrupt attribute. */
134 int disable_interrupts;
136 /* Number of doloop tags used so far. */
137 int doloop_tags;
139 /* True if the last tag was allocated to a doloop_end. */
140 bool doloop_tag_from_end;
142 /* True if reload changes $TP. */
143 bool reload_changes_tp;
145 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
146 We only set this if the function is an interrupt handler. */
147 int asms_without_operands;
150 #define MEP_CONTROL_REG(x) \
151 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
153 static GTY(()) section * based_section;
154 static GTY(()) section * tinybss_section;
155 static GTY(()) section * far_section;
156 static GTY(()) section * farbss_section;
157 static GTY(()) section * frodata_section;
158 static GTY(()) section * srodata_section;
160 static GTY(()) section * vtext_section;
161 static GTY(()) section * vftext_section;
162 static GTY(()) section * ftext_section;
164 static void mep_set_leaf_registers (int);
165 static bool symbol_p (rtx);
166 static bool symbolref_p (rtx);
167 static void encode_pattern_1 (rtx);
168 static void encode_pattern (rtx);
169 static bool const_in_range (rtx, int, int);
170 static void mep_rewrite_mult (rtx_insn *, rtx);
171 static void mep_rewrite_mulsi3 (rtx_insn *, rtx, rtx, rtx);
172 static void mep_rewrite_maddsi3 (rtx_insn *, rtx, rtx, rtx, rtx);
173 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx_insn *, bool);
174 static bool move_needs_splitting (rtx, rtx, machine_mode);
175 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
176 static bool mep_nongeneral_reg (rtx);
177 static bool mep_general_copro_reg (rtx);
178 static bool mep_nonregister (rtx);
179 static struct machine_function* mep_init_machine_status (void);
180 static rtx mep_tp_rtx (void);
181 static rtx mep_gp_rtx (void);
182 static bool mep_interrupt_p (void);
183 static bool mep_disinterrupt_p (void);
184 static bool mep_reg_set_p (rtx, rtx);
185 static bool mep_reg_set_in_function (int);
186 static bool mep_interrupt_saved_reg (int);
187 static bool mep_call_saves_register (int);
188 static rtx_insn *F (rtx_insn *);
189 static void add_constant (int, int, int, int);
190 static rtx_insn *maybe_dead_move (rtx, rtx, bool);
191 static void mep_reload_pointer (int, const char *);
192 static void mep_start_function (FILE *, HOST_WIDE_INT);
193 static bool mep_function_ok_for_sibcall (tree, tree);
194 static int unique_bit_in (HOST_WIDE_INT);
195 static int bit_size_for_clip (HOST_WIDE_INT);
196 static int bytesize (const_tree, machine_mode);
197 static tree mep_validate_type_based_tiny (tree *, tree, tree, int, bool *);
198 static tree mep_validate_decl_based_tiny (tree *, tree, tree, int, bool *);
199 static tree mep_validate_type_near_far (tree *, tree, tree, int, bool *);
200 static tree mep_validate_decl_near_far (tree *, tree, tree, int, bool *);
201 static tree mep_validate_type_disinterrupt (tree *, tree, tree, int, bool *);
202 static tree mep_validate_decl_disinterrupt (tree *, tree, tree, int, bool *);
203 static tree mep_validate_type_warning(tree *, tree, tree, int, bool *);
204 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
205 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
206 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
207 static bool mep_function_attribute_inlinable_p (const_tree);
208 static bool mep_can_inline_p (tree, tree);
209 static bool mep_lookup_pragma_disinterrupt (const char *);
210 static int mep_multiple_address_regions (tree, bool);
211 static int mep_attrlist_to_encoding (tree, tree);
212 static void mep_insert_attributes (tree, tree *);
213 static void mep_encode_section_info (tree, rtx, int);
214 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
215 static void mep_unique_section (tree, int);
216 static unsigned int mep_section_type_flags (tree, const char *, int);
217 static void mep_asm_named_section (const char *, unsigned int, tree);
218 static bool mep_mentioned_p (rtx, rtx, int);
219 static void mep_reorg_regmove (rtx_insn *);
220 static rtx_insn *mep_insert_repeat_label_last (rtx_insn *, rtx_code_label *,
221 bool, bool);
222 static void mep_reorg_repeat (rtx_insn *);
223 static bool mep_invertable_branch_p (rtx_insn *);
224 static void mep_invert_branch (rtx_insn *, rtx_insn *);
225 static void mep_reorg_erepeat (rtx_insn *);
226 static void mep_jmp_return_reorg (rtx_insn *);
227 static void mep_reorg_addcombine (rtx_insn *);
228 static void mep_reorg (void);
229 static void mep_init_intrinsics (void);
230 static void mep_init_builtins (void);
231 static void mep_intrinsic_unavailable (int);
232 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
233 static bool mep_get_move_insn (int, const struct cgen_insn **);
234 static rtx mep_convert_arg (machine_mode, rtx);
235 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
236 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
237 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
238 static rtx mep_expand_builtin (tree, rtx, rtx, machine_mode, int);
239 static int mep_adjust_cost (rtx_insn *, rtx, rtx_insn *, int);
240 static int mep_issue_rate (void);
241 static rtx_insn *mep_find_ready_insn (rtx_insn **, int, enum attr_slot, int);
242 static void mep_move_ready_insn (rtx_insn **, int, rtx_insn *);
243 static int mep_sched_reorder (FILE *, int, rtx_insn **, int *, int);
244 static rtx_insn *mep_make_bundle (rtx, rtx_insn *);
245 static void mep_bundle_insns (rtx_insn *);
246 static bool mep_rtx_cost (rtx, int, int, int, int *, bool);
247 static int mep_address_cost (rtx, machine_mode, addr_space_t, bool);
248 static void mep_setup_incoming_varargs (cumulative_args_t, machine_mode,
249 tree, int *, int);
250 static bool mep_pass_by_reference (cumulative_args_t cum, machine_mode,
251 const_tree, bool);
252 static rtx mep_function_arg (cumulative_args_t, machine_mode,
253 const_tree, bool);
254 static void mep_function_arg_advance (cumulative_args_t, machine_mode,
255 const_tree, bool);
256 static bool mep_vector_mode_supported_p (machine_mode);
257 static rtx mep_allocate_initial_value (rtx);
258 static void mep_asm_init_sections (void);
259 static int mep_comp_type_attributes (const_tree, const_tree);
260 static bool mep_narrow_volatile_bitfield (void);
261 static rtx mep_expand_builtin_saveregs (void);
262 static tree mep_build_builtin_va_list (void);
263 static void mep_expand_va_start (tree, rtx);
264 static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
265 static bool mep_can_eliminate (const int, const int);
266 static void mep_conditional_register_usage (void);
267 static void mep_trampoline_init (rtx, tree, rtx);
269 #define WANT_GCC_DEFINITIONS
270 #include "mep-intrin.h"
271 #undef WANT_GCC_DEFINITIONS
274 /* Command Line Option Support. */
276 char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
278 /* True if we can use cmov instructions to move values back and forth
279 between core and coprocessor registers. */
280 bool mep_have_core_copro_moves_p;
282 /* True if we can use cmov instructions (or a work-alike) to move
283 values between coprocessor registers. */
284 bool mep_have_copro_copro_moves_p;
286 /* A table of all coprocessor instructions that can act like
287 a coprocessor-to-coprocessor cmov. */
288 static const int mep_cmov_insns[] = {
289 mep_cmov,
290 mep_cpmov,
291 mep_fmovs,
292 mep_caddi3,
293 mep_csubi3,
294 mep_candi3,
295 mep_cori3,
296 mep_cxori3,
297 mep_cand3,
298 mep_cor3
302 static void
303 mep_set_leaf_registers (int enable)
305 int i;
307 if (mep_leaf_registers[0] != enable)
308 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
309 mep_leaf_registers[i] = enable;
312 static void
313 mep_conditional_register_usage (void)
315 int i;
317 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
319 fixed_regs[HI_REGNO] = 1;
320 fixed_regs[LO_REGNO] = 1;
321 call_used_regs[HI_REGNO] = 1;
322 call_used_regs[LO_REGNO] = 1;
325 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
326 global_regs[i] = 1;
329 static void
330 mep_option_override (void)
332 unsigned int i;
333 int j;
334 cl_deferred_option *opt;
335 vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) mep_deferred_options;
337 if (v)
338 FOR_EACH_VEC_ELT (*v, i, opt)
340 switch (opt->opt_index)
342 case OPT_mivc2:
343 for (j = 0; j < 32; j++)
344 fixed_regs[j + 48] = 0;
345 for (j = 0; j < 32; j++)
346 call_used_regs[j + 48] = 1;
347 for (j = 6; j < 8; j++)
348 call_used_regs[j + 48] = 0;
350 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
351 RN (0, "$csar0");
352 RN (1, "$cc");
353 RN (4, "$cofr0");
354 RN (5, "$cofr1");
355 RN (6, "$cofa0");
356 RN (7, "$cofa1");
357 RN (15, "$csar1");
359 RN (16, "$acc0_0");
360 RN (17, "$acc0_1");
361 RN (18, "$acc0_2");
362 RN (19, "$acc0_3");
363 RN (20, "$acc0_4");
364 RN (21, "$acc0_5");
365 RN (22, "$acc0_6");
366 RN (23, "$acc0_7");
368 RN (24, "$acc1_0");
369 RN (25, "$acc1_1");
370 RN (26, "$acc1_2");
371 RN (27, "$acc1_3");
372 RN (28, "$acc1_4");
373 RN (29, "$acc1_5");
374 RN (30, "$acc1_6");
375 RN (31, "$acc1_7");
376 #undef RN
377 break;
379 default:
380 gcc_unreachable ();
384 if (flag_pic == 1)
385 warning (OPT_fpic, "-fpic is not supported");
386 if (flag_pic == 2)
387 warning (OPT_fPIC, "-fPIC is not supported");
388 if (TARGET_S && TARGET_M)
389 error ("only one of -ms and -mm may be given");
390 if (TARGET_S && TARGET_L)
391 error ("only one of -ms and -ml may be given");
392 if (TARGET_M && TARGET_L)
393 error ("only one of -mm and -ml may be given");
394 if (TARGET_S && global_options_set.x_mep_tiny_cutoff)
395 error ("only one of -ms and -mtiny= may be given");
396 if (TARGET_M && global_options_set.x_mep_tiny_cutoff)
397 error ("only one of -mm and -mtiny= may be given");
398 if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
399 warning (0, "-mclip currently has no effect without -mminmax");
401 if (mep_const_section)
403 if (strcmp (mep_const_section, "tiny") != 0
404 && strcmp (mep_const_section, "near") != 0
405 && strcmp (mep_const_section, "far") != 0)
406 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
409 if (TARGET_S)
410 mep_tiny_cutoff = 65536;
411 if (TARGET_M)
412 mep_tiny_cutoff = 0;
413 if (TARGET_L && ! global_options_set.x_mep_tiny_cutoff)
414 mep_tiny_cutoff = 0;
416 if (TARGET_64BIT_CR_REGS)
417 flag_split_wide_types = 0;
419 init_machine_status = mep_init_machine_status;
420 mep_init_intrinsics ();
423 /* Pattern Support - constraints, predicates, expanders. */
425 /* MEP has very few instructions that can refer to the span of
426 addresses used by symbols, so it's common to check for them. */
428 static bool
429 symbol_p (rtx x)
431 int c = GET_CODE (x);
433 return (c == CONST_INT
434 || c == CONST
435 || c == SYMBOL_REF);
438 static bool
439 symbolref_p (rtx x)
441 int c;
443 if (GET_CODE (x) != MEM)
444 return false;
446 c = GET_CODE (XEXP (x, 0));
447 return (c == CONST_INT
448 || c == CONST
449 || c == SYMBOL_REF);
452 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
454 #define GEN_REG(R, STRICT) \
455 (GR_REGNO_P (R) \
456 || (!STRICT \
457 && ((R) == ARG_POINTER_REGNUM \
458 || (R) >= FIRST_PSEUDO_REGISTER)))
460 static char pattern[12], *patternp;
461 static GTY(()) rtx patternr[12];
462 #define RTX_IS(x) (strcmp (pattern, x) == 0)
464 static void
465 encode_pattern_1 (rtx x)
467 int i;
469 if (patternp == pattern + sizeof (pattern) - 2)
471 patternp[-1] = '?';
472 return;
475 patternr[patternp-pattern] = x;
477 switch (GET_CODE (x))
479 case REG:
480 *patternp++ = 'r';
481 break;
482 case MEM:
483 *patternp++ = 'm';
484 case CONST:
485 encode_pattern_1 (XEXP(x, 0));
486 break;
487 case PLUS:
488 *patternp++ = '+';
489 encode_pattern_1 (XEXP(x, 0));
490 encode_pattern_1 (XEXP(x, 1));
491 break;
492 case LO_SUM:
493 *patternp++ = 'L';
494 encode_pattern_1 (XEXP(x, 0));
495 encode_pattern_1 (XEXP(x, 1));
496 break;
497 case HIGH:
498 *patternp++ = 'H';
499 encode_pattern_1 (XEXP(x, 0));
500 break;
501 case SYMBOL_REF:
502 *patternp++ = 's';
503 break;
504 case LABEL_REF:
505 *patternp++ = 'l';
506 break;
507 case CONST_INT:
508 case CONST_DOUBLE:
509 *patternp++ = 'i';
510 break;
511 case UNSPEC:
512 *patternp++ = 'u';
513 *patternp++ = '0' + XCINT(x, 1, UNSPEC);
514 for (i=0; i<XVECLEN (x, 0); i++)
515 encode_pattern_1 (XVECEXP (x, 0, i));
516 break;
517 case USE:
518 *patternp++ = 'U';
519 break;
520 default:
521 *patternp++ = '?';
522 #if 0
523 fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
524 debug_rtx (x);
525 gcc_unreachable ();
526 #endif
527 break;
531 static void
532 encode_pattern (rtx x)
534 patternp = pattern;
535 encode_pattern_1 (x);
536 *patternp = 0;
540 mep_section_tag (rtx x)
542 const char *name;
544 while (1)
546 switch (GET_CODE (x))
548 case MEM:
549 case CONST:
550 x = XEXP (x, 0);
551 break;
552 case UNSPEC:
553 x = XVECEXP (x, 0, 0);
554 break;
555 case PLUS:
556 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
557 return 0;
558 x = XEXP (x, 0);
559 break;
560 default:
561 goto done;
564 done:
565 if (GET_CODE (x) != SYMBOL_REF)
566 return 0;
567 name = XSTR (x, 0);
568 if (name[0] == '@' && name[2] == '.')
570 if (name[1] == 'i' || name[1] == 'I')
572 if (name[1] == 'I')
573 return 'f'; /* near */
574 return 'n'; /* far */
576 return name[1];
578 return 0;
582 mep_regno_reg_class (int regno)
584 switch (regno)
586 case SP_REGNO: return SP_REGS;
587 case TP_REGNO: return TP_REGS;
588 case GP_REGNO: return GP_REGS;
589 case 0: return R0_REGS;
590 case HI_REGNO: return HI_REGS;
591 case LO_REGNO: return LO_REGS;
592 case ARG_POINTER_REGNUM: return GENERAL_REGS;
595 if (GR_REGNO_P (regno))
596 return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
597 if (CONTROL_REGNO_P (regno))
598 return CONTROL_REGS;
600 if (CR_REGNO_P (regno))
602 int i, j;
604 /* Search for the register amongst user-defined subclasses of
605 the coprocessor registers. */
606 for (i = USER0_REGS; i <= USER3_REGS; ++i)
608 if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
609 continue;
610 for (j = 0; j < N_REG_CLASSES; ++j)
612 enum reg_class sub = reg_class_subclasses[i][j];
614 if (sub == LIM_REG_CLASSES)
615 return i;
616 if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
617 break;
621 return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
624 if (CCR_REGNO_P (regno))
625 return CCR_REGS;
627 gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
628 return NO_REGS;
631 static bool
632 const_in_range (rtx x, int minv, int maxv)
634 return (GET_CODE (x) == CONST_INT
635 && INTVAL (x) >= minv
636 && INTVAL (x) <= maxv);
639 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
640 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
641 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
642 at the end of the insn stream. */
645 mep_mulr_source (rtx_insn *insn, rtx dest, rtx src1, rtx src2)
647 if (rtx_equal_p (dest, src1))
648 return src2;
649 else if (rtx_equal_p (dest, src2))
650 return src1;
651 else
653 if (insn == 0)
654 emit_insn (gen_movsi (copy_rtx (dest), src1));
655 else
656 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
657 return src2;
661 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
662 Change the last element of PATTERN from (clobber (scratch:SI))
663 to (clobber (reg:SI HI_REGNO)). */
665 static void
666 mep_rewrite_mult (rtx_insn *insn, rtx pattern)
668 rtx hi_clobber;
670 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
671 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
672 PATTERN (insn) = pattern;
673 INSN_CODE (insn) = -1;
676 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
677 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
678 store the result in DEST if nonnull. */
680 static void
681 mep_rewrite_mulsi3 (rtx_insn *insn, rtx dest, rtx src1, rtx src2)
683 rtx lo, pattern;
685 lo = gen_rtx_REG (SImode, LO_REGNO);
686 if (dest)
687 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
688 mep_mulr_source (insn, dest, src1, src2));
689 else
690 pattern = gen_mulsi3_lo (lo, src1, src2);
691 mep_rewrite_mult (insn, pattern);
694 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
695 SRC3 into $lo, then use either madd or maddr. The move into $lo will
696 be deleted by a peephole2 if SRC3 is already in $lo. */
698 static void
699 mep_rewrite_maddsi3 (rtx_insn *insn, rtx dest, rtx src1, rtx src2, rtx src3)
701 rtx lo, pattern;
703 lo = gen_rtx_REG (SImode, LO_REGNO);
704 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
705 if (dest)
706 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
707 mep_mulr_source (insn, dest, src1, src2),
708 copy_rtx (lo));
709 else
710 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
711 mep_rewrite_mult (insn, pattern);
714 /* Return true if $lo has the same value as integer register GPR when
715 instruction INSN is reached. If necessary, rewrite the instruction
716 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
717 rtx for (reg:SI LO_REGNO).
719 This function is intended to be used by the peephole2 pass. Since
720 that pass goes from the end of a basic block to the beginning, and
721 propagates liveness information on the way, there is no need to
722 update register notes here.
724 If GPR_DEAD_P is true on entry, and this function returns true,
725 then the caller will replace _every_ use of GPR in and after INSN
726 with LO. This means that if the instruction that sets $lo is a
727 mulr- or maddr-type instruction, we can rewrite it to use mul or
728 madd instead. In combination with the copy progagation pass,
729 this allows us to replace sequences like:
731 mov GPR,R1
732 mulr GPR,R2
734 with:
736 mul R1,R2
738 if GPR is no longer used. */
740 static bool
741 mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx_insn *insn, bool gpr_dead_p)
745 insn = PREV_INSN (insn);
746 if (INSN_P (insn))
747 switch (recog_memoized (insn))
749 case CODE_FOR_mulsi3_1:
750 extract_insn (insn);
751 if (rtx_equal_p (recog_data.operand[0], gpr))
753 mep_rewrite_mulsi3 (insn,
754 gpr_dead_p ? NULL : recog_data.operand[0],
755 recog_data.operand[1],
756 recog_data.operand[2]);
757 return true;
759 return false;
761 case CODE_FOR_maddsi3:
762 extract_insn (insn);
763 if (rtx_equal_p (recog_data.operand[0], gpr))
765 mep_rewrite_maddsi3 (insn,
766 gpr_dead_p ? NULL : recog_data.operand[0],
767 recog_data.operand[1],
768 recog_data.operand[2],
769 recog_data.operand[3]);
770 return true;
772 return false;
774 case CODE_FOR_mulsi3r:
775 case CODE_FOR_maddsi3r:
776 extract_insn (insn);
777 return rtx_equal_p (recog_data.operand[1], gpr);
779 default:
780 if (reg_set_p (lo, insn)
781 || reg_set_p (gpr, insn)
782 || volatile_insn_p (PATTERN (insn)))
783 return false;
785 if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
786 gpr_dead_p = false;
787 break;
790 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
791 return false;
794 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
796 bool
797 mep_reuse_lo_p (rtx lo, rtx gpr, rtx_insn *insn, bool gpr_dead_p)
799 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
800 extract_insn (insn);
801 return result;
804 /* Return true if SET can be turned into a post-modify load or store
805 that adds OFFSET to GPR. In other words, return true if SET can be
806 changed into:
808 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
810 It's OK to change SET to an equivalent operation in order to
811 make it match. */
813 static bool
814 mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
816 rtx *reg, *mem;
817 unsigned int reg_bytes, mem_bytes;
818 machine_mode reg_mode, mem_mode;
820 /* Only simple SETs can be converted. */
821 if (GET_CODE (set) != SET)
822 return false;
824 /* Point REG to what we hope will be the register side of the set and
825 MEM to what we hope will be the memory side. */
826 if (GET_CODE (SET_DEST (set)) == MEM)
828 mem = &SET_DEST (set);
829 reg = &SET_SRC (set);
831 else
833 reg = &SET_DEST (set);
834 mem = &SET_SRC (set);
835 if (GET_CODE (*mem) == SIGN_EXTEND)
836 mem = &XEXP (*mem, 0);
839 /* Check that *REG is a suitable coprocessor register. */
840 if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
841 return false;
843 /* Check that *MEM is a suitable memory reference. */
844 if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
845 return false;
847 /* Get the number of bytes in each operand. */
848 mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
849 reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));
851 /* Check that OFFSET is suitably aligned. */
852 if (INTVAL (offset) & (mem_bytes - 1))
853 return false;
855 /* Convert *MEM to a normal integer mode. */
856 mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
857 *mem = change_address (*mem, mem_mode, NULL);
859 /* Adjust *REG as well. */
860 *reg = shallow_copy_rtx (*reg);
861 if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
863 /* SET is a subword load. Convert it to an explicit extension. */
864 PUT_MODE (*reg, SImode);
865 *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
867 else
869 reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
870 PUT_MODE (*reg, reg_mode);
872 return true;
875 /* Return the effect of frame-related instruction INSN. */
877 static rtx
878 mep_frame_expr (rtx_insn *insn)
880 rtx note, expr;
882 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
883 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
884 RTX_FRAME_RELATED_P (expr) = 1;
885 return expr;
888 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
889 new pattern in INSN1; INSN2 will be deleted by the caller. */
891 static void
892 mep_make_parallel (rtx_insn *insn1, rtx_insn *insn2)
894 rtx expr;
896 if (RTX_FRAME_RELATED_P (insn2))
898 expr = mep_frame_expr (insn2);
899 if (RTX_FRAME_RELATED_P (insn1))
900 expr = gen_rtx_SEQUENCE (VOIDmode,
901 gen_rtvec (2, mep_frame_expr (insn1), expr));
902 set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
903 RTX_FRAME_RELATED_P (insn1) = 1;
906 PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
907 gen_rtvec (2, PATTERN (insn1),
908 PATTERN (insn2)));
909 INSN_CODE (insn1) = -1;
912 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
913 the basic block to see if any previous load or store instruction can
914 be persuaded to do SET_INSN as a side-effect. Return true if so. */
916 static bool
917 mep_use_post_modify_p_1 (rtx_insn *set_insn, rtx reg, rtx offset)
919 rtx_insn *insn;
921 insn = set_insn;
924 insn = PREV_INSN (insn);
925 if (INSN_P (insn))
927 if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
929 mep_make_parallel (insn, set_insn);
930 return true;
933 if (reg_set_p (reg, insn)
934 || reg_referenced_p (reg, PATTERN (insn))
935 || volatile_insn_p (PATTERN (insn)))
936 return false;
939 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
940 return false;
943 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
945 bool
946 mep_use_post_modify_p (rtx_insn *insn, rtx reg, rtx offset)
948 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
949 extract_insn (insn);
950 return result;
953 bool
954 mep_allow_clip (rtx ux, rtx lx, int s)
956 HOST_WIDE_INT u = INTVAL (ux);
957 HOST_WIDE_INT l = INTVAL (lx);
958 int i;
960 if (!TARGET_OPT_CLIP)
961 return false;
963 if (s)
965 for (i = 0; i < 30; i ++)
966 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
967 && (l == - ((HOST_WIDE_INT) 1 << i)))
968 return true;
970 else
972 if (l != 0)
973 return false;
975 for (i = 0; i < 30; i ++)
976 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
977 return true;
979 return false;
982 bool
983 mep_bit_position_p (rtx x, bool looking_for)
985 if (GET_CODE (x) != CONST_INT)
986 return false;
987 switch ((int) INTVAL(x) & 0xff)
989 case 0x01: case 0x02: case 0x04: case 0x08:
990 case 0x10: case 0x20: case 0x40: case 0x80:
991 return looking_for;
992 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
993 case 0xef: case 0xdf: case 0xbf: case 0x7f:
994 return !looking_for;
996 return false;
999 static bool
1000 move_needs_splitting (rtx dest, rtx src,
1001 machine_mode mode ATTRIBUTE_UNUSED)
1003 int s = mep_section_tag (src);
1005 while (1)
1007 if (GET_CODE (src) == CONST
1008 || GET_CODE (src) == MEM)
1009 src = XEXP (src, 0);
1010 else if (GET_CODE (src) == SYMBOL_REF
1011 || GET_CODE (src) == LABEL_REF
1012 || GET_CODE (src) == PLUS)
1013 break;
1014 else
1015 return false;
1017 if (s == 'f'
1018 || (GET_CODE (src) == PLUS
1019 && GET_CODE (XEXP (src, 1)) == CONST_INT
1020 && (INTVAL (XEXP (src, 1)) < -65536
1021 || INTVAL (XEXP (src, 1)) > 0xffffff))
1022 || (GET_CODE (dest) == REG
1023 && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
1024 return true;
1025 return false;
1028 bool
1029 mep_split_mov (rtx *operands, int symbolic)
1031 if (symbolic)
1033 if (move_needs_splitting (operands[0], operands[1], SImode))
1034 return true;
1035 return false;
1038 if (GET_CODE (operands[1]) != CONST_INT)
1039 return false;
1041 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1042 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1043 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1044 return false;
1046 if (((!reload_completed && !reload_in_progress)
1047 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1048 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1049 return false;
1051 return true;
1054 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1055 it to one specific value. So the insn chosen depends on whether
1056 the source and destination modes match. */
1058 bool
1059 mep_vliw_mode_match (rtx tgt)
1061 bool src_vliw = mep_vliw_function_p (cfun->decl);
1062 bool tgt_vliw = INTVAL (tgt);
1064 return src_vliw == tgt_vliw;
1067 /* Like the above, but also test for near/far mismatches. */
1069 bool
1070 mep_vliw_jmp_match (rtx tgt)
1072 bool src_vliw = mep_vliw_function_p (cfun->decl);
1073 bool tgt_vliw = INTVAL (tgt);
1075 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1076 return false;
1078 return src_vliw == tgt_vliw;
1081 bool
1082 mep_multi_slot (rtx_insn *x)
1084 return get_attr_slot (x) == SLOT_MULTI;
1087 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1089 static bool
1090 mep_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1092 /* We can't convert symbol values to gp- or tp-rel values after
1093 reload, as reload might have used $gp or $tp for other
1094 purposes. */
1095 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1097 char e = mep_section_tag (x);
1098 return (e != 't' && e != 'b');
1100 return 1;
1103 /* Be careful not to use macros that need to be compiled one way for
1104 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
1106 bool
1107 mep_legitimate_address (machine_mode mode, rtx x, int strict)
1109 int the_tag;
1111 #define DEBUG_LEGIT 0
1112 #if DEBUG_LEGIT
1113 fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
1114 debug_rtx (x);
1115 #endif
1117 if (GET_CODE (x) == LO_SUM
1118 && GET_CODE (XEXP (x, 0)) == REG
1119 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1120 && CONSTANT_P (XEXP (x, 1)))
1122 if (GET_MODE_SIZE (mode) > 4)
1124 /* We will end up splitting this, and lo_sums are not
1125 offsettable for us. */
1126 #if DEBUG_LEGIT
1127 fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
1128 #endif
1129 return false;
1131 #if DEBUG_LEGIT
1132 fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
1133 #endif
1134 return true;
1137 if (GET_CODE (x) == REG
1138 && GEN_REG (REGNO (x), strict))
1140 #if DEBUG_LEGIT
1141 fprintf (stderr, " - yup, [reg]\n");
1142 #endif
1143 return true;
1146 if (GET_CODE (x) == PLUS
1147 && GET_CODE (XEXP (x, 0)) == REG
1148 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1149 && const_in_range (XEXP (x, 1), -32768, 32767))
1151 #if DEBUG_LEGIT
1152 fprintf (stderr, " - yup, [reg+const]\n");
1153 #endif
1154 return true;
1157 if (GET_CODE (x) == PLUS
1158 && GET_CODE (XEXP (x, 0)) == REG
1159 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1160 && GET_CODE (XEXP (x, 1)) == CONST
1161 && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
1162 || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
1163 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
1164 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
1166 #if DEBUG_LEGIT
1167 fprintf (stderr, " - yup, [reg+unspec]\n");
1168 #endif
1169 return true;
1172 the_tag = mep_section_tag (x);
1174 if (the_tag == 'f')
1176 #if DEBUG_LEGIT
1177 fprintf (stderr, " - nope, [far]\n");
1178 #endif
1179 return false;
1182 if (mode == VOIDmode
1183 && GET_CODE (x) == SYMBOL_REF)
1185 #if DEBUG_LEGIT
1186 fprintf (stderr, " - yup, call [symbol]\n");
1187 #endif
1188 return true;
1191 if ((mode == SImode || mode == SFmode)
1192 && CONSTANT_P (x)
1193 && mep_legitimate_constant_p (mode, x)
1194 && the_tag != 't' && the_tag != 'b')
1196 if (GET_CODE (x) != CONST_INT
1197 || (INTVAL (x) <= 0xfffff
1198 && INTVAL (x) >= 0
1199 && (INTVAL (x) % 4) == 0))
1201 #if DEBUG_LEGIT
1202 fprintf (stderr, " - yup, [const]\n");
1203 #endif
1204 return true;
1208 #if DEBUG_LEGIT
1209 fprintf (stderr, " - nope.\n");
1210 #endif
1211 return false;
1215 mep_legitimize_reload_address (rtx *x, machine_mode mode, int opnum,
1216 int type_i,
1217 int ind_levels ATTRIBUTE_UNUSED)
1219 enum reload_type type = (enum reload_type) type_i;
1221 if (GET_CODE (*x) == PLUS
1222 && GET_CODE (XEXP (*x, 0)) == MEM
1223 && GET_CODE (XEXP (*x, 1)) == REG)
1225 /* GCC will by default copy the MEM into a REG, which results in
1226 an invalid address. For us, the best thing to do is move the
1227 whole expression to a REG. */
1228 push_reload (*x, NULL_RTX, x, NULL,
1229 GENERAL_REGS, mode, VOIDmode,
1230 0, 0, opnum, type);
1231 return 1;
1234 if (GET_CODE (*x) == PLUS
1235 && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
1236 && GET_CODE (XEXP (*x, 1)) == CONST_INT)
1238 char e = mep_section_tag (XEXP (*x, 0));
1240 if (e != 't' && e != 'b')
1242 /* GCC thinks that (sym+const) is a valid address. Well,
1243 sometimes it is, this time it isn't. The best thing to
1244 do is reload the symbol to a register, since reg+int
1245 tends to work, and we can't just add the symbol and
1246 constant anyway. */
1247 push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
1248 GENERAL_REGS, mode, VOIDmode,
1249 0, 0, opnum, type);
1250 return 1;
1253 return 0;
1257 mep_core_address_length (rtx_insn *insn, int opn)
1259 rtx set = single_set (insn);
1260 rtx mem = XEXP (set, opn);
1261 rtx other = XEXP (set, 1-opn);
1262 rtx addr = XEXP (mem, 0);
1264 if (register_operand (addr, Pmode))
1265 return 2;
1266 if (GET_CODE (addr) == PLUS)
1268 rtx addend = XEXP (addr, 1);
1270 gcc_assert (REG_P (XEXP (addr, 0)));
1272 switch (REGNO (XEXP (addr, 0)))
1274 case STACK_POINTER_REGNUM:
1275 if (GET_MODE_SIZE (GET_MODE (mem)) == 4
1276 && mep_imm7a4_operand (addend, VOIDmode))
1277 return 2;
1278 break;
1280 case 13: /* TP */
1281 gcc_assert (REG_P (other));
1283 if (REGNO (other) >= 8)
1284 break;
1286 if (GET_CODE (addend) == CONST
1287 && GET_CODE (XEXP (addend, 0)) == UNSPEC
1288 && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
1289 return 2;
1291 if (GET_CODE (addend) == CONST_INT
1292 && INTVAL (addend) >= 0
1293 && INTVAL (addend) <= 127
1294 && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
1295 return 2;
1296 break;
1300 return 4;
1304 mep_cop_address_length (rtx_insn *insn, int opn)
1306 rtx set = single_set (insn);
1307 rtx mem = XEXP (set, opn);
1308 rtx addr = XEXP (mem, 0);
1310 if (GET_CODE (mem) != MEM)
1311 return 2;
1312 if (register_operand (addr, Pmode))
1313 return 2;
1314 if (GET_CODE (addr) == POST_INC)
1315 return 2;
1317 return 4;
1320 #define DEBUG_EXPAND_MOV 0
1321 bool
1322 mep_expand_mov (rtx *operands, machine_mode mode)
1324 int i, t;
1325 int tag[2];
1326 rtx tpsym, tpoffs;
1327 int post_reload = 0;
1329 tag[0] = mep_section_tag (operands[0]);
1330 tag[1] = mep_section_tag (operands[1]);
1332 if (!reload_in_progress
1333 && !reload_completed
1334 && GET_CODE (operands[0]) != REG
1335 && GET_CODE (operands[0]) != SUBREG
1336 && GET_CODE (operands[1]) != REG
1337 && GET_CODE (operands[1]) != SUBREG)
1338 operands[1] = copy_to_mode_reg (mode, operands[1]);
1340 #if DEBUG_EXPAND_MOV
1341 fprintf(stderr, "expand move %s %d\n", mode_name[mode],
1342 reload_in_progress || reload_completed);
1343 debug_rtx (operands[0]);
1344 debug_rtx (operands[1]);
1345 #endif
1347 if (mode == DImode || mode == DFmode)
1348 return false;
1350 if (reload_in_progress || reload_completed)
1352 rtx r;
1354 if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
1355 cfun->machine->reload_changes_tp = true;
1357 if (tag[0] == 't' || tag[1] == 't')
1359 r = has_hard_reg_initial_val (Pmode, GP_REGNO);
1360 if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
1361 post_reload = 1;
1363 if (tag[0] == 'b' || tag[1] == 'b')
1365 r = has_hard_reg_initial_val (Pmode, TP_REGNO);
1366 if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
1367 post_reload = 1;
1369 if (cfun->machine->reload_changes_tp == true)
1370 post_reload = 1;
1373 if (!post_reload)
1375 rtx n;
1376 if (symbol_p (operands[1]))
1378 t = mep_section_tag (operands[1]);
1379 if (t == 'b' || t == 't')
1382 if (GET_CODE (operands[1]) == SYMBOL_REF)
1384 tpsym = operands[1];
1385 n = gen_rtx_UNSPEC (mode,
1386 gen_rtvec (1, operands[1]),
1387 t == 'b' ? UNS_TPREL : UNS_GPREL);
1388 n = gen_rtx_CONST (mode, n);
1390 else if (GET_CODE (operands[1]) == CONST
1391 && GET_CODE (XEXP (operands[1], 0)) == PLUS
1392 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
1393 && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
1395 tpsym = XEXP (XEXP (operands[1], 0), 0);
1396 tpoffs = XEXP (XEXP (operands[1], 0), 1);
1397 n = gen_rtx_UNSPEC (mode,
1398 gen_rtvec (1, tpsym),
1399 t == 'b' ? UNS_TPREL : UNS_GPREL);
1400 n = gen_rtx_PLUS (mode, n, tpoffs);
1401 n = gen_rtx_CONST (mode, n);
1403 else if (GET_CODE (operands[1]) == CONST
1404 && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
1405 return false;
1406 else
1408 error ("unusual TP-relative address");
1409 return false;
1412 n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
1413 : mep_gp_rtx ()), n);
1414 n = emit_insn (gen_rtx_SET (mode, operands[0], n));
1415 #if DEBUG_EXPAND_MOV
1416 fprintf(stderr, "mep_expand_mov emitting ");
1417 debug_rtx(n);
1418 #endif
1419 return true;
1423 for (i=0; i < 2; i++)
1425 t = mep_section_tag (operands[i]);
1426 if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
1428 rtx sym, n, r;
1429 int u;
1431 sym = XEXP (operands[i], 0);
1432 if (GET_CODE (sym) == CONST
1433 && GET_CODE (XEXP (sym, 0)) == UNSPEC)
1434 sym = XVECEXP (XEXP (sym, 0), 0, 0);
1436 if (t == 'b')
1438 r = mep_tp_rtx ();
1439 u = UNS_TPREL;
1441 else
1443 r = mep_gp_rtx ();
1444 u = UNS_GPREL;
1447 n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
1448 n = gen_rtx_CONST (Pmode, n);
1449 n = gen_rtx_PLUS (Pmode, r, n);
1450 operands[i] = replace_equiv_address (operands[i], n);
1455 if ((GET_CODE (operands[1]) != REG
1456 && MEP_CONTROL_REG (operands[0]))
1457 || (GET_CODE (operands[0]) != REG
1458 && MEP_CONTROL_REG (operands[1])))
1460 rtx temp;
1461 #if DEBUG_EXPAND_MOV
1462 fprintf (stderr, "cr-mem, forcing op1 to reg\n");
1463 #endif
1464 temp = gen_reg_rtx (mode);
1465 emit_move_insn (temp, operands[1]);
1466 operands[1] = temp;
1469 if (symbolref_p (operands[0])
1470 && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
1471 || (GET_MODE_SIZE (mode) != 4)))
1473 rtx temp;
1475 gcc_assert (!reload_in_progress && !reload_completed);
1477 temp = force_reg (Pmode, XEXP (operands[0], 0));
1478 operands[0] = replace_equiv_address (operands[0], temp);
1479 emit_move_insn (operands[0], operands[1]);
1480 return true;
1483 if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
1484 tag[1] = 0;
1486 if (symbol_p (operands[1])
1487 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1489 emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
1490 emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
1491 return true;
1494 if (symbolref_p (operands[1])
1495 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1497 rtx temp;
1499 if (reload_in_progress || reload_completed)
1500 temp = operands[0];
1501 else
1502 temp = gen_reg_rtx (Pmode);
1504 emit_insn (gen_movsi_topsym_s (temp, operands[1]));
1505 emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
1506 emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
1507 return true;
1510 return false;
1513 /* Cases where the pattern can't be made to use at all. */
1515 bool
1516 mep_mov_ok (rtx *operands, machine_mode mode ATTRIBUTE_UNUSED)
1518 int i;
1520 #define DEBUG_MOV_OK 0
1521 #if DEBUG_MOV_OK
1522 fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
1523 mep_section_tag (operands[1]));
1524 debug_rtx (operands[0]);
1525 debug_rtx (operands[1]);
1526 #endif
1528 /* We want the movh patterns to get these. */
1529 if (GET_CODE (operands[1]) == HIGH)
1530 return false;
1532 /* We can't store a register to a far variable without using a
1533 scratch register to hold the address. Using far variables should
1534 be split by mep_emit_mov anyway. */
1535 if (mep_section_tag (operands[0]) == 'f'
1536 || mep_section_tag (operands[1]) == 'f')
1538 #if DEBUG_MOV_OK
1539 fprintf (stderr, " - no, f\n");
1540 #endif
1541 return false;
1543 i = mep_section_tag (operands[1]);
1544 if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
1545 /* These are supposed to be generated with adds of the appropriate
1546 register. During and after reload, however, we allow them to
1547 be accessed as normal symbols because adding a dependency on
1548 the base register now might cause problems. */
1550 #if DEBUG_MOV_OK
1551 fprintf (stderr, " - no, bt\n");
1552 #endif
1553 return false;
1556 /* The only moves we can allow involve at least one general
1557 register, so require it. */
1558 for (i = 0; i < 2; i ++)
1560 /* Allow subregs too, before reload. */
1561 rtx x = operands[i];
1563 if (GET_CODE (x) == SUBREG)
1564 x = XEXP (x, 0);
1565 if (GET_CODE (x) == REG
1566 && ! MEP_CONTROL_REG (x))
1568 #if DEBUG_MOV_OK
1569 fprintf (stderr, " - ok\n");
1570 #endif
1571 return true;
1574 #if DEBUG_MOV_OK
1575 fprintf (stderr, " - no, no gen reg\n");
1576 #endif
1577 return false;
1580 #define DEBUG_SPLIT_WIDE_MOVE 0
1581 void
1582 mep_split_wide_move (rtx *operands, machine_mode mode)
1584 int i;
1586 #if DEBUG_SPLIT_WIDE_MOVE
1587 fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
1588 debug_rtx (operands[0]);
1589 debug_rtx (operands[1]);
1590 #endif
1592 for (i = 0; i <= 1; i++)
1594 rtx op = operands[i], hi, lo;
1596 switch (GET_CODE (op))
1598 case REG:
1600 unsigned int regno = REGNO (op);
1602 if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
1604 rtx i32;
1606 lo = gen_rtx_REG (SImode, regno);
1607 i32 = GEN_INT (32);
1608 hi = gen_rtx_ZERO_EXTRACT (SImode,
1609 gen_rtx_REG (DImode, regno),
1610 i32, i32);
1612 else
1614 hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
1615 lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
1618 break;
1620 case CONST_INT:
1621 case CONST_DOUBLE:
1622 case MEM:
1623 hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
1624 lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
1625 break;
1627 default:
1628 gcc_unreachable ();
1631 /* The high part of CR <- GPR moves must be done after the low part. */
1632 operands [i + 4] = lo;
1633 operands [i + 2] = hi;
1636 if (reg_mentioned_p (operands[2], operands[5])
1637 || GET_CODE (operands[2]) == ZERO_EXTRACT
1638 || GET_CODE (operands[4]) == ZERO_EXTRACT)
1640 rtx tmp;
1642 /* Overlapping register pairs -- make sure we don't
1643 early-clobber ourselves. */
1644 tmp = operands[2];
1645 operands[2] = operands[4];
1646 operands[4] = tmp;
1647 tmp = operands[3];
1648 operands[3] = operands[5];
1649 operands[5] = tmp;
1652 #if DEBUG_SPLIT_WIDE_MOVE
1653 fprintf(stderr, "\033[34m");
1654 debug_rtx (operands[2]);
1655 debug_rtx (operands[3]);
1656 debug_rtx (operands[4]);
1657 debug_rtx (operands[5]);
1658 fprintf(stderr, "\033[0m");
1659 #endif
1662 /* Emit a setcc instruction in its entirity. */
1664 static bool
1665 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1667 rtx tmp;
1669 switch (code)
1671 case GT:
1672 case GTU:
1673 tmp = op1, op1 = op2, op2 = tmp;
1674 code = swap_condition (code);
1675 /* FALLTHRU */
1677 case LT:
1678 case LTU:
1679 op1 = force_reg (SImode, op1);
1680 emit_insn (gen_rtx_SET (VOIDmode, dest,
1681 gen_rtx_fmt_ee (code, SImode, op1, op2)));
1682 return true;
1684 case EQ:
1685 if (op2 != const0_rtx)
1686 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1687 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1688 return true;
1690 case NE:
1691 /* Branchful sequence:
1692 mov dest, 0 16-bit
1693 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1694 mov dest, 1 16-bit
1696 Branchless sequence:
1697 add3 tmp, op1, -op2 32-bit (or mov + sub)
1698 sltu3 tmp, tmp, 1 16-bit
1699 xor3 dest, tmp, 1 32-bit
1701 if (optimize_size && op2 != const0_rtx)
1702 return false;
1704 if (op2 != const0_rtx)
1705 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1707 op2 = gen_reg_rtx (SImode);
1708 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1710 emit_insn (gen_rtx_SET (VOIDmode, dest,
1711 gen_rtx_XOR (SImode, op2, const1_rtx)));
1712 return true;
1714 case LE:
1715 if (GET_CODE (op2) != CONST_INT
1716 || INTVAL (op2) == 0x7ffffff)
1717 return false;
1718 op2 = GEN_INT (INTVAL (op2) + 1);
1719 return mep_expand_setcc_1 (LT, dest, op1, op2);
1721 case LEU:
1722 if (GET_CODE (op2) != CONST_INT
1723 || INTVAL (op2) == -1)
1724 return false;
1725 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1726 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1728 case GE:
1729 if (GET_CODE (op2) != CONST_INT
1730 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1731 return false;
1732 op2 = GEN_INT (INTVAL (op2) - 1);
1733 return mep_expand_setcc_1 (GT, dest, op1, op2);
1735 case GEU:
1736 if (GET_CODE (op2) != CONST_INT
1737 || op2 == const0_rtx)
1738 return false;
1739 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1740 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1742 default:
1743 gcc_unreachable ();
1747 bool
1748 mep_expand_setcc (rtx *operands)
1750 rtx dest = operands[0];
1751 enum rtx_code code = GET_CODE (operands[1]);
1752 rtx op0 = operands[2];
1753 rtx op1 = operands[3];
1755 return mep_expand_setcc_1 (code, dest, op0, op1);
1759 mep_expand_cbranch (rtx *operands)
1761 enum rtx_code code = GET_CODE (operands[0]);
1762 rtx op0 = operands[1];
1763 rtx op1 = operands[2];
1764 rtx tmp;
1766 restart:
1767 switch (code)
1769 case LT:
1770 if (mep_imm4_operand (op1, SImode))
1771 break;
1773 tmp = gen_reg_rtx (SImode);
1774 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1775 code = NE;
1776 op0 = tmp;
1777 op1 = const0_rtx;
1778 break;
1780 case GE:
1781 if (mep_imm4_operand (op1, SImode))
1782 break;
1784 tmp = gen_reg_rtx (SImode);
1785 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1787 code = EQ;
1788 op0 = tmp;
1789 op1 = const0_rtx;
1790 break;
1792 case EQ:
1793 case NE:
1794 if (! mep_reg_or_imm4_operand (op1, SImode))
1795 op1 = force_reg (SImode, op1);
1796 break;
1798 case LE:
1799 case GT:
1800 if (GET_CODE (op1) == CONST_INT
1801 && INTVAL (op1) != 0x7fffffff)
1803 op1 = GEN_INT (INTVAL (op1) + 1);
1804 code = (code == LE ? LT : GE);
1805 goto restart;
1808 tmp = gen_reg_rtx (SImode);
1809 gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));
1811 code = (code == LE ? EQ : NE);
1812 op0 = tmp;
1813 op1 = const0_rtx;
1814 break;
1816 case LTU:
1817 if (op1 == const1_rtx)
1819 code = EQ;
1820 op1 = const0_rtx;
1821 break;
1824 tmp = gen_reg_rtx (SImode);
1825 gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
1826 code = NE;
1827 op0 = tmp;
1828 op1 = const0_rtx;
1829 break;
1831 case LEU:
1832 tmp = gen_reg_rtx (SImode);
1833 if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
1834 code = NE;
1835 else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
1836 code = EQ;
1837 else
1838 gcc_unreachable ();
1839 op0 = tmp;
1840 op1 = const0_rtx;
1841 break;
1843 case GTU:
1844 tmp = gen_reg_rtx (SImode);
1845 gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
1846 || mep_expand_setcc_1 (LTU, tmp, op1, op0));
1847 code = NE;
1848 op0 = tmp;
1849 op1 = const0_rtx;
1850 break;
1852 case GEU:
1853 tmp = gen_reg_rtx (SImode);
1854 if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
1855 code = NE;
1856 else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
1857 code = EQ;
1858 else
1859 gcc_unreachable ();
1860 op0 = tmp;
1861 op1 = const0_rtx;
1862 break;
1864 default:
1865 gcc_unreachable ();
1868 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
1871 const char *
1872 mep_emit_cbranch (rtx *operands, int ne)
1874 if (GET_CODE (operands[1]) == REG)
1875 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1876 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
1877 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1878 else
1879 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1882 void
1883 mep_expand_call (rtx *operands, int returns_value)
1885 rtx addr = operands[returns_value];
1886 rtx tp = mep_tp_rtx ();
1887 rtx gp = mep_gp_rtx ();
1889 gcc_assert (GET_CODE (addr) == MEM);
1891 addr = XEXP (addr, 0);
1893 if (! mep_call_address_operand (addr, VOIDmode))
1894 addr = force_reg (SImode, addr);
1896 if (! operands[returns_value+2])
1897 operands[returns_value+2] = const0_rtx;
1899 if (returns_value)
1900 emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
1901 operands[3], tp, gp));
1902 else
1903 emit_call_insn (gen_call_internal (addr, operands[1],
1904 operands[2], tp, gp));
1907 /* Aliasing Support. */
1909 /* If X is a machine specific address (i.e. a symbol or label being
1910 referenced as a displacement from the GOT implemented using an
1911 UNSPEC), then return the base term. Otherwise return X. */
1914 mep_find_base_term (rtx x)
1916 rtx base, term;
1917 int unspec;
1919 if (GET_CODE (x) != PLUS)
1920 return x;
1921 base = XEXP (x, 0);
1922 term = XEXP (x, 1);
1924 if (has_hard_reg_initial_val(Pmode, TP_REGNO)
1925 && base == mep_tp_rtx ())
1926 unspec = UNS_TPREL;
1927 else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
1928 && base == mep_gp_rtx ())
1929 unspec = UNS_GPREL;
1930 else
1931 return x;
1933 if (GET_CODE (term) != CONST)
1934 return x;
1935 term = XEXP (term, 0);
1937 if (GET_CODE (term) != UNSPEC
1938 || XINT (term, 1) != unspec)
1939 return x;
1941 return XVECEXP (term, 0, 0);
1944 /* Reload Support. */
1946 /* Return true if the registers in CLASS cannot represent the change from
1947 modes FROM to TO. */
1949 bool
1950 mep_cannot_change_mode_class (machine_mode from, machine_mode to,
1951 enum reg_class regclass)
1953 if (from == to)
1954 return false;
1956 /* 64-bit COP regs must remain 64-bit COP regs. */
1957 if (TARGET_64BIT_CR_REGS
1958 && (regclass == CR_REGS
1959 || regclass == LOADABLE_CR_REGS)
1960 && (GET_MODE_SIZE (to) < 8
1961 || GET_MODE_SIZE (from) < 8))
1962 return true;
1964 return false;
1967 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
1969 static bool
1970 mep_general_reg (rtx x)
1972 while (GET_CODE (x) == SUBREG)
1973 x = XEXP (x, 0);
1974 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
1977 static bool
1978 mep_nongeneral_reg (rtx x)
1980 while (GET_CODE (x) == SUBREG)
1981 x = XEXP (x, 0);
1982 return (GET_CODE (x) == REG
1983 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
1986 static bool
1987 mep_general_copro_reg (rtx x)
1989 while (GET_CODE (x) == SUBREG)
1990 x = XEXP (x, 0);
1991 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
1994 static bool
1995 mep_nonregister (rtx x)
1997 while (GET_CODE (x) == SUBREG)
1998 x = XEXP (x, 0);
1999 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2002 #define DEBUG_RELOAD 0
2004 /* Return the secondary reload class needed for moving value X to or
2005 from a register in coprocessor register class CLASS. */
2007 static enum reg_class
2008 mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
2010 if (mep_general_reg (x))
2011 /* We can do the move directly if mep_have_core_copro_moves_p,
2012 otherwise we need to go through memory. Either way, no secondary
2013 register is needed. */
2014 return NO_REGS;
2016 if (mep_general_copro_reg (x))
2018 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2019 if (mep_have_copro_copro_moves_p)
2020 return NO_REGS;
2022 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2023 if (mep_have_core_copro_moves_p)
2024 return GENERAL_REGS;
2026 /* Otherwise we need to do it through memory. No secondary
2027 register is needed. */
2028 return NO_REGS;
2031 if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
2032 && constraint_satisfied_p (x, CONSTRAINT_U))
2033 /* X is a memory value that we can access directly. */
2034 return NO_REGS;
2036 /* We have to move X into a GPR first and then copy it to
2037 the coprocessor register. The move from the GPR to the
2038 coprocessor might be done directly or through memory,
2039 depending on mep_have_core_copro_moves_p. */
2040 return GENERAL_REGS;
2043 /* Copying X to register in RCLASS. */
2045 enum reg_class
2046 mep_secondary_input_reload_class (enum reg_class rclass,
2047 machine_mode mode ATTRIBUTE_UNUSED,
2048 rtx x)
2050 int rv = NO_REGS;
2052 #if DEBUG_RELOAD
2053 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2054 debug_rtx (x);
2055 #endif
2057 if (reg_class_subset_p (rclass, CR_REGS))
2058 rv = mep_secondary_copro_reload_class (rclass, x);
2059 else if (MEP_NONGENERAL_CLASS (rclass)
2060 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2061 rv = GENERAL_REGS;
2063 #if DEBUG_RELOAD
2064 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2065 #endif
2066 return (enum reg_class) rv;
2069 /* Copying register in RCLASS to X. */
2071 enum reg_class
2072 mep_secondary_output_reload_class (enum reg_class rclass,
2073 machine_mode mode ATTRIBUTE_UNUSED,
2074 rtx x)
2076 int rv = NO_REGS;
2078 #if DEBUG_RELOAD
2079 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2080 debug_rtx (x);
2081 #endif
2083 if (reg_class_subset_p (rclass, CR_REGS))
2084 rv = mep_secondary_copro_reload_class (rclass, x);
2085 else if (MEP_NONGENERAL_CLASS (rclass)
2086 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2087 rv = GENERAL_REGS;
2089 #if DEBUG_RELOAD
2090 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2091 #endif
2093 return (enum reg_class) rv;
2096 /* Implement SECONDARY_MEMORY_NEEDED. */
2098 bool
2099 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2100 machine_mode mode ATTRIBUTE_UNUSED)
2102 if (!mep_have_core_copro_moves_p)
2104 if (reg_classes_intersect_p (rclass1, CR_REGS)
2105 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2106 return true;
2107 if (reg_classes_intersect_p (rclass2, CR_REGS)
2108 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2109 return true;
2110 if (!mep_have_copro_copro_moves_p
2111 && reg_classes_intersect_p (rclass1, CR_REGS)
2112 && reg_classes_intersect_p (rclass2, CR_REGS))
2113 return true;
2115 return false;
2118 void
2119 mep_expand_reload (rtx *operands, machine_mode mode)
2121 /* There are three cases for each direction:
2122 register, farsym
2123 control, farsym
2124 control, nearsym */
2126 int s0 = mep_section_tag (operands[0]) == 'f';
2127 int s1 = mep_section_tag (operands[1]) == 'f';
2128 int c0 = mep_nongeneral_reg (operands[0]);
2129 int c1 = mep_nongeneral_reg (operands[1]);
2130 int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);
2132 #if DEBUG_RELOAD
2133 fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
2134 debug_rtx (operands[0]);
2135 debug_rtx (operands[1]);
2136 #endif
2138 switch (which)
2140 case 00: /* Don't know why this gets here. */
2141 case 02: /* general = far */
2142 emit_move_insn (operands[0], operands[1]);
2143 return;
2145 case 10: /* cr = mem */
2146 case 11: /* cr = cr */
2147 case 01: /* mem = cr */
2148 case 12: /* cr = far */
2149 emit_move_insn (operands[2], operands[1]);
2150 emit_move_insn (operands[0], operands[2]);
2151 return;
2153 case 20: /* far = general */
2154 emit_move_insn (operands[2], XEXP (operands[1], 0));
2155 emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
2156 return;
2158 case 21: /* far = cr */
2159 case 22: /* far = far */
2160 default:
2161 fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
2162 which, mode_name[mode]);
2163 debug_rtx (operands[0]);
2164 debug_rtx (operands[1]);
2165 gcc_unreachable ();
2169 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2170 can be moved directly into registers 0 to 7, but not into the rest.
2171 If so, and if the required class includes registers 0 to 7, restrict
2172 it to those registers. */
2174 enum reg_class
2175 mep_preferred_reload_class (rtx x, enum reg_class rclass)
2177 switch (GET_CODE (x))
2179 case CONST_INT:
2180 if (INTVAL (x) >= 0x10000
2181 && INTVAL (x) < 0x01000000
2182 && (INTVAL (x) & 0xffff) != 0
2183 && reg_class_subset_p (TPREL_REGS, rclass))
2184 rclass = TPREL_REGS;
2185 break;
2187 case CONST:
2188 case SYMBOL_REF:
2189 case LABEL_REF:
2190 if (mep_section_tag (x) != 'f'
2191 && reg_class_subset_p (TPREL_REGS, rclass))
2192 rclass = TPREL_REGS;
2193 break;
2195 default:
2196 break;
2198 return rclass;
2201 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2202 moves, 4 for direct double-register moves, and 1000 for anything
2203 that requires a temporary register or temporary stack slot. */
2206 mep_register_move_cost (machine_mode mode, enum reg_class from, enum reg_class to)
2208 if (mep_have_copro_copro_moves_p
2209 && reg_class_subset_p (from, CR_REGS)
2210 && reg_class_subset_p (to, CR_REGS))
2212 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2213 return 4;
2214 return 2;
2216 if (reg_class_subset_p (from, CR_REGS)
2217 && reg_class_subset_p (to, CR_REGS))
2219 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2220 return 8;
2221 return 4;
2223 if (reg_class_subset_p (from, CR_REGS)
2224 || reg_class_subset_p (to, CR_REGS))
2226 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2227 return 4;
2228 return 2;
2230 if (mep_secondary_memory_needed (from, to, mode))
2231 return 1000;
2232 if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
2233 return 1000;
2235 if (GET_MODE_SIZE (mode) > 4)
2236 return 4;
2238 return 2;
2242 /* Functions to save and restore machine-specific function data. */
2244 static struct machine_function *
2245 mep_init_machine_status (void)
2247 return ggc_cleared_alloc<machine_function> ();
2250 static rtx
2251 mep_allocate_initial_value (rtx reg)
2253 int rss;
2255 if (GET_CODE (reg) != REG)
2256 return NULL_RTX;
2258 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2259 return NULL_RTX;
2261 /* In interrupt functions, the "initial" values of $gp and $tp are
2262 provided by the prologue. They are not necessarily the same as
2263 the values that the caller was using. */
2264 if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
2265 if (mep_interrupt_p ())
2266 return NULL_RTX;
2268 if (! cfun->machine->reg_save_slot[REGNO(reg)])
2270 cfun->machine->reg_save_size += 4;
2271 cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
2274 rss = cfun->machine->reg_save_slot[REGNO(reg)];
2275 return gen_rtx_MEM (SImode, plus_constant (Pmode, arg_pointer_rtx, -rss));
2279 mep_return_addr_rtx (int count)
2281 if (count != 0)
2282 return const0_rtx;
2284 return get_hard_reg_initial_val (Pmode, LP_REGNO);
2287 static rtx
2288 mep_tp_rtx (void)
2290 return get_hard_reg_initial_val (Pmode, TP_REGNO);
2293 static rtx
2294 mep_gp_rtx (void)
2296 return get_hard_reg_initial_val (Pmode, GP_REGNO);
2299 static bool
2300 mep_interrupt_p (void)
2302 if (cfun->machine->interrupt_handler == 0)
2304 int interrupt_handler
2305 = (lookup_attribute ("interrupt",
2306 DECL_ATTRIBUTES (current_function_decl))
2307 != NULL_TREE);
2308 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2310 return cfun->machine->interrupt_handler == 2;
2313 static bool
2314 mep_disinterrupt_p (void)
2316 if (cfun->machine->disable_interrupts == 0)
2318 int disable_interrupts
2319 = (lookup_attribute ("disinterrupt",
2320 DECL_ATTRIBUTES (current_function_decl))
2321 != NULL_TREE);
2322 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2324 return cfun->machine->disable_interrupts == 2;
2328 /* Frame/Epilog/Prolog Related. */
2330 static bool
2331 mep_reg_set_p (rtx reg, rtx insn)
2333 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2334 if (INSN_P (insn))
2336 if (FIND_REG_INC_NOTE (insn, reg))
2337 return true;
2338 insn = PATTERN (insn);
2341 if (GET_CODE (insn) == SET
2342 && GET_CODE (XEXP (insn, 0)) == REG
2343 && GET_CODE (XEXP (insn, 1)) == REG
2344 && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
2345 return false;
2347 return set_of (reg, insn) != NULL_RTX;
2351 #define MEP_SAVES_UNKNOWN 0
2352 #define MEP_SAVES_YES 1
2353 #define MEP_SAVES_MAYBE 2
2354 #define MEP_SAVES_NO 3
2356 static bool
2357 mep_reg_set_in_function (int regno)
2359 rtx reg;
2360 rtx_insn *insn;
2362 if (mep_interrupt_p () && df_regs_ever_live_p(regno))
2363 return true;
2365 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2366 return true;
2368 push_topmost_sequence ();
2369 insn = get_insns ();
2370 pop_topmost_sequence ();
2372 if (!insn)
2373 return false;
2375 reg = gen_rtx_REG (SImode, regno);
2377 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2378 if (INSN_P (insn) && mep_reg_set_p (reg, insn))
2379 return true;
2380 return false;
2383 static bool
2384 mep_asm_without_operands_p (void)
2386 if (cfun->machine->asms_without_operands == 0)
2388 rtx_insn *insn;
2390 push_topmost_sequence ();
2391 insn = get_insns ();
2392 pop_topmost_sequence ();
2394 cfun->machine->asms_without_operands = 1;
2395 while (insn)
2397 if (INSN_P (insn)
2398 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2400 cfun->machine->asms_without_operands = 2;
2401 break;
2403 insn = NEXT_INSN (insn);
2407 return cfun->machine->asms_without_operands == 2;
2410 /* Interrupt functions save/restore every call-preserved register, and
2411 any call-used register it uses (or all if it calls any function,
2412 since they may get clobbered there too). Here we check to see
2413 which call-used registers need saving. */
2415 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2416 && (r == FIRST_CCR_REGNO + 1 \
2417 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2418 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2420 static bool
2421 mep_interrupt_saved_reg (int r)
2423 if (!mep_interrupt_p ())
2424 return false;
2425 if (r == REGSAVE_CONTROL_TEMP
2426 || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
2427 return true;
2428 if (mep_asm_without_operands_p ()
2429 && (!fixed_regs[r]
2430 || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
2431 || IVC2_ISAVED_REG (r)))
2432 return true;
2433 if (!crtl->is_leaf)
2434 /* Function calls mean we need to save $lp. */
2435 if (r == LP_REGNO || IVC2_ISAVED_REG (r))
2436 return true;
2437 if (!crtl->is_leaf || cfun->machine->doloop_tags > 0)
2438 /* The interrupt handler might use these registers for repeat blocks,
2439 or it might call a function that does so. */
2440 if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
2441 return true;
2442 if (crtl->is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
2443 return false;
2444 /* Functions we call might clobber these. */
2445 if (call_used_regs[r] && !fixed_regs[r])
2446 return true;
2447 /* Additional registers that need to be saved for IVC2. */
2448 if (IVC2_ISAVED_REG (r))
2449 return true;
2451 return false;
2454 static bool
2455 mep_call_saves_register (int r)
2457 if (! cfun->machine->frame_locked)
2459 int rv = MEP_SAVES_NO;
2461 if (cfun->machine->reg_save_slot[r])
2462 rv = MEP_SAVES_YES;
2463 else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2464 rv = MEP_SAVES_YES;
2465 else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
2466 rv = MEP_SAVES_YES;
2467 else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
2468 rv = MEP_SAVES_YES;
2469 else if (crtl->calls_eh_return && (r == 10 || r == 11))
2470 /* We need these to have stack slots so that they can be set during
2471 unwinding. */
2472 rv = MEP_SAVES_YES;
2473 else if (mep_interrupt_saved_reg (r))
2474 rv = MEP_SAVES_YES;
2475 cfun->machine->reg_saved[r] = rv;
2477 return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
2480 /* Return true if epilogue uses register REGNO. */
2482 bool
2483 mep_epilogue_uses (int regno)
2485 /* Since $lp is a call-saved register, the generic code will normally
2486 mark it used in the epilogue if it needs to be saved and restored.
2487 However, when profiling is enabled, the profiling code will implicitly
2488 clobber $11. This case has to be handled specially both here and in
2489 mep_call_saves_register. */
2490 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2491 return true;
2492 /* Interrupt functions save/restore pretty much everything. */
2493 return (reload_completed && mep_interrupt_saved_reg (regno));
2496 static int
2497 mep_reg_size (int regno)
2499 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2500 return 8;
2501 return 4;
2504 /* Worker function for TARGET_CAN_ELIMINATE. */
2506 bool
2507 mep_can_eliminate (const int from, const int to)
2509 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2510 ? ! frame_pointer_needed
2511 : true);
2515 mep_elimination_offset (int from, int to)
2517 int reg_save_size;
2518 int i;
2519 int frame_size = get_frame_size () + crtl->outgoing_args_size;
2520 int total_size;
2522 if (!cfun->machine->frame_locked)
2523 memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));
2525 /* We don't count arg_regs_to_save in the arg pointer offset, because
2526 gcc thinks the arg pointer has moved along with the saved regs.
2527 However, we do count it when we adjust $sp in the prologue. */
2528 reg_save_size = 0;
2529 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2530 if (mep_call_saves_register (i))
2531 reg_save_size += mep_reg_size (i);
2533 if (reg_save_size % 8)
2534 cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
2535 else
2536 cfun->machine->regsave_filler = 0;
2538 /* This is what our total stack adjustment looks like. */
2539 total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);
2541 if (total_size % 8)
2542 cfun->machine->frame_filler = 8 - (total_size % 8);
2543 else
2544 cfun->machine->frame_filler = 0;
2547 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
2548 return reg_save_size + cfun->machine->regsave_filler;
2550 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2551 return cfun->machine->frame_filler + frame_size;
2553 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2554 return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;
2556 gcc_unreachable ();
2559 static rtx_insn *
2560 F (rtx_insn *x)
2562 RTX_FRAME_RELATED_P (x) = 1;
2563 return x;
2566 /* Since the prologue/epilogue code is generated after optimization,
2567 we can't rely on gcc to split constants for us. So, this code
2568 captures all the ways to add a constant to a register in one logic
2569 chunk, including optimizing away insns we just don't need. This
2570 makes the prolog/epilog code easier to follow. */
2571 static void
2572 add_constant (int dest, int src, int value, int mark_frame)
2574 rtx_insn *insn;
2575 int hi, lo;
2577 if (src == dest && value == 0)
2578 return;
2580 if (value == 0)
2582 insn = emit_move_insn (gen_rtx_REG (SImode, dest),
2583 gen_rtx_REG (SImode, src));
2584 if (mark_frame)
2585 RTX_FRAME_RELATED_P(insn) = 1;
2586 return;
2589 if (value >= -32768 && value <= 32767)
2591 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2592 gen_rtx_REG (SImode, src),
2593 GEN_INT (value)));
2594 if (mark_frame)
2595 RTX_FRAME_RELATED_P(insn) = 1;
2596 return;
2599 /* Big constant, need to use a temp register. We use
2600 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2601 area is always small enough to directly add to). */
2603 hi = trunc_int_for_mode (value & 0xffff0000, SImode);
2604 lo = value & 0xffff;
2606 insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2607 GEN_INT (hi));
2609 if (lo)
2611 insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2612 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2613 GEN_INT (lo)));
2616 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2617 gen_rtx_REG (SImode, src),
2618 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
2619 if (mark_frame)
2621 RTX_FRAME_RELATED_P(insn) = 1;
2622 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2623 gen_rtx_SET (SImode,
2624 gen_rtx_REG (SImode, dest),
2625 gen_rtx_PLUS (SImode,
2626 gen_rtx_REG (SImode, dest),
2627 GEN_INT (value))));
2631 /* Move SRC to DEST. Mark the move as being potentially dead if
2632 MAYBE_DEAD_P. */
2634 static rtx_insn *
2635 maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2637 rtx_insn *insn = emit_move_insn (dest, src);
2638 #if 0
2639 if (maybe_dead_p)
2640 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2641 #endif
2642 return insn;
2645 /* Used for interrupt functions, which can't assume that $tp and $gp
2646 contain the correct pointers. */
2648 static void
2649 mep_reload_pointer (int regno, const char *symbol)
2651 rtx reg, sym;
2653 if (!df_regs_ever_live_p(regno) && crtl->is_leaf)
2654 return;
2656 reg = gen_rtx_REG (SImode, regno);
2657 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2658 emit_insn (gen_movsi_topsym_s (reg, sym));
2659 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2662 /* Assign save slots for any register not already saved. DImode
2663 registers go at the end of the reg save area; the rest go at the
2664 beginning. This is for alignment purposes. Returns true if a frame
2665 is really needed. */
2666 static bool
2667 mep_assign_save_slots (int reg_save_size)
2669 bool really_need_stack_frame = false;
2670 int di_ofs = 0;
2671 int i;
2673 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2674 if (mep_call_saves_register(i))
2676 int regsize = mep_reg_size (i);
2678 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2679 || mep_reg_set_in_function (i))
2680 really_need_stack_frame = true;
2682 if (cfun->machine->reg_save_slot[i])
2683 continue;
2685 if (regsize < 8)
2687 cfun->machine->reg_save_size += regsize;
2688 cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
2690 else
2692 cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
2693 di_ofs += 8;
2696 cfun->machine->frame_locked = 1;
2697 return really_need_stack_frame;
2700 void
2701 mep_expand_prologue (void)
2703 int i, rss, sp_offset = 0;
2704 int reg_save_size;
2705 int frame_size;
2706 int really_need_stack_frame;
2708 /* We must not allow register renaming in interrupt functions,
2709 because that invalidates the correctness of the set of call-used
2710 registers we're going to save/restore. */
2711 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2713 if (mep_disinterrupt_p ())
2714 emit_insn (gen_mep_disable_int ());
2716 cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;
2718 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2719 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2720 really_need_stack_frame = frame_size;
2722 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
2724 sp_offset = reg_save_size;
2725 if (sp_offset + frame_size < 128)
2726 sp_offset += frame_size ;
2728 add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);
2730 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2731 if (mep_call_saves_register(i))
2733 rtx mem;
2734 bool maybe_dead_p;
2735 machine_mode rmode;
2737 rss = cfun->machine->reg_save_slot[i];
2739 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2740 && (!mep_reg_set_in_function (i)
2741 && !mep_interrupt_p ()))
2742 continue;
2744 if (mep_reg_size (i) == 8)
2745 rmode = DImode;
2746 else
2747 rmode = SImode;
2749 /* If there is a pseudo associated with this register's initial value,
2750 reload might have already spilt it to the stack slot suggested by
2751 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2752 deleted as dead. */
2753 mem = gen_rtx_MEM (rmode,
2754 plus_constant (Pmode, stack_pointer_rtx,
2755 sp_offset - rss));
2756 maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));
2758 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2759 F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
2760 else if (rmode == DImode)
2762 rtx_insn *insn;
2763 int be = TARGET_BIG_ENDIAN ? 4 : 0;
2765 mem = gen_rtx_MEM (SImode,
2766 plus_constant (Pmode, stack_pointer_rtx,
2767 sp_offset - rss + be));
2769 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2770 gen_rtx_REG (SImode, i),
2771 maybe_dead_p);
2772 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2773 gen_rtx_ZERO_EXTRACT (SImode,
2774 gen_rtx_REG (DImode, i),
2775 GEN_INT (32),
2776 GEN_INT (32)),
2777 maybe_dead_p);
2778 insn = maybe_dead_move (mem,
2779 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2780 maybe_dead_p);
2781 RTX_FRAME_RELATED_P (insn) = 1;
2783 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2784 gen_rtx_SET (VOIDmode,
2785 copy_rtx (mem),
2786 gen_rtx_REG (rmode, i)));
2787 mem = gen_rtx_MEM (SImode,
2788 plus_constant (Pmode, stack_pointer_rtx,
2789 sp_offset - rss + (4-be)));
2790 insn = maybe_dead_move (mem,
2791 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2792 maybe_dead_p);
2794 else
2796 rtx_insn *insn;
2797 maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2798 gen_rtx_REG (rmode, i),
2799 maybe_dead_p);
2800 insn = maybe_dead_move (mem,
2801 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2802 maybe_dead_p);
2803 RTX_FRAME_RELATED_P (insn) = 1;
2805 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2806 gen_rtx_SET (VOIDmode,
2807 copy_rtx (mem),
2808 gen_rtx_REG (rmode, i)));
2812 if (frame_pointer_needed)
2814 /* We've already adjusted down by sp_offset. Total $sp change
2815 is reg_save_size + frame_size. We want a net change here of
2816 just reg_save_size. */
2817 add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
2820 add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);
2822 if (mep_interrupt_p ())
2824 mep_reload_pointer(GP_REGNO, "__sdabase");
2825 mep_reload_pointer(TP_REGNO, "__tpbase");
2829 static void
2830 mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2832 int local = hwi_local;
2833 int frame_size = local + crtl->outgoing_args_size;
2834 int reg_save_size;
2835 int ffill;
2836 int i, sp, skip;
2837 int sp_offset;
2838 int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
2840 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2841 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2842 sp_offset = reg_save_size + frame_size;
2844 ffill = cfun->machine->frame_filler;
2846 if (cfun->machine->mep_frame_pointer_needed)
2847 reg_names[FP_REGNO] = "$fp";
2848 else
2849 reg_names[FP_REGNO] = "$8";
2851 if (sp_offset == 0)
2852 return;
2854 if (debug_info_level == DINFO_LEVEL_NONE)
2856 fprintf (file, "\t# frame: %d", sp_offset);
2857 if (reg_save_size)
2858 fprintf (file, " %d regs", reg_save_size);
2859 if (local)
2860 fprintf (file, " %d locals", local);
2861 if (crtl->outgoing_args_size)
2862 fprintf (file, " %d args", crtl->outgoing_args_size);
2863 fprintf (file, "\n");
2864 return;
2867 fprintf (file, "\t#\n");
2868 fprintf (file, "\t# Initial Frame Information:\n");
2869 if (sp_offset || !frame_pointer_needed)
2870 fprintf (file, "\t# Entry ---------- 0\n");
2872 /* Sort registers by save slots, so they're printed in the order
2873 they appear in memory, not the order they're saved in. */
2874 for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
2875 slot_map[si] = si;
2876 for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
2877 for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
2878 if (cfun->machine->reg_save_slot[slot_map[si]]
2879 > cfun->machine->reg_save_slot[slot_map[sj]])
2881 int t = slot_map[si];
2882 slot_map[si] = slot_map[sj];
2883 slot_map[sj] = t;
2886 sp = 0;
2887 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2889 int rsize;
2890 int r = slot_map[i];
2891 int rss = cfun->machine->reg_save_slot[r];
2893 if (!mep_call_saves_register (r))
2894 continue;
2896 if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
2897 && (!mep_reg_set_in_function (r)
2898 && !mep_interrupt_p ()))
2899 continue;
2901 rsize = mep_reg_size(r);
2902 skip = rss - (sp+rsize);
2903 if (skip)
2904 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2905 fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2906 rsize, reg_names[r], sp_offset - rss);
2907 sp = rss;
2910 skip = reg_save_size - sp;
2911 if (skip)
2912 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2914 if (frame_pointer_needed)
2915 fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
2916 if (local)
2917 fprintf (file, "\t# %3d bytes for local vars\n", local);
2918 if (ffill)
2919 fprintf (file, "\t# %3d bytes for alignment\n", ffill);
2920 if (crtl->outgoing_args_size)
2921 fprintf (file, "\t# %3d bytes for outgoing args\n",
2922 crtl->outgoing_args_size);
2923 fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
2924 fprintf (file, "\t#\n");
2928 static int mep_prevent_lp_restore = 0;
2929 static int mep_sibcall_epilogue = 0;
2931 void
2932 mep_expand_epilogue (void)
2934 int i, sp_offset = 0;
2935 int reg_save_size = 0;
2936 int frame_size;
2937 int lp_temp = LP_REGNO, lp_slot = -1;
2938 int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
2939 int interrupt_handler = mep_interrupt_p ();
2941 if (profile_arc_flag == 2)
2942 emit_insn (gen_mep_bb_trace_ret ());
2944 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2945 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2947 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
2949 if (frame_pointer_needed)
2951 /* If we have a frame pointer, we won't have a reliable stack
2952 pointer (alloca, you know), so rebase SP from FP */
2953 emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
2954 gen_rtx_REG (SImode, FP_REGNO));
2955 sp_offset = reg_save_size;
2957 else
2959 /* SP is right under our local variable space. Adjust it if
2960 needed. */
2961 sp_offset = reg_save_size + frame_size;
2962 if (sp_offset >= 128)
2964 add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
2965 sp_offset -= frame_size;
2969 /* This is backwards so that we restore the control and coprocessor
2970 registers before the temporary registers we use to restore
2971 them. */
2972 for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
2973 if (mep_call_saves_register (i))
2975 machine_mode rmode;
2976 int rss = cfun->machine->reg_save_slot[i];
2978 if (mep_reg_size (i) == 8)
2979 rmode = DImode;
2980 else
2981 rmode = SImode;
2983 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2984 && !(mep_reg_set_in_function (i) || interrupt_handler))
2985 continue;
2986 if (mep_prevent_lp_restore && i == LP_REGNO)
2987 continue;
2988 if (!mep_prevent_lp_restore
2989 && !interrupt_handler
2990 && (i == 10 || i == 11))
2991 continue;
2993 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2994 emit_move_insn (gen_rtx_REG (rmode, i),
2995 gen_rtx_MEM (rmode,
2996 plus_constant (Pmode, stack_pointer_rtx,
2997 sp_offset - rss)));
2998 else
3000 if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
3001 /* Defer this one so we can jump indirect rather than
3002 copying the RA to $lp and "ret". EH epilogues
3003 automatically skip this anyway. */
3004 lp_slot = sp_offset-rss;
3005 else
3007 emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
3008 gen_rtx_MEM (rmode,
3009 plus_constant (Pmode,
3010 stack_pointer_rtx,
3011 sp_offset-rss)));
3012 emit_move_insn (gen_rtx_REG (rmode, i),
3013 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
3017 if (lp_slot != -1)
3019 /* Restore this one last so we know it will be in the temp
3020 register when we return by jumping indirectly via the temp. */
3021 emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
3022 gen_rtx_MEM (SImode,
3023 plus_constant (Pmode, stack_pointer_rtx,
3024 lp_slot)));
3025 lp_temp = REGSAVE_CONTROL_TEMP;
3029 add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);
3031 if (crtl->calls_eh_return && mep_prevent_lp_restore)
3032 emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
3033 gen_rtx_REG (SImode, SP_REGNO),
3034 cfun->machine->eh_stack_adjust));
3036 if (mep_sibcall_epilogue)
3037 return;
3039 if (mep_disinterrupt_p ())
3040 emit_insn (gen_mep_enable_int ());
3042 if (mep_prevent_lp_restore)
3044 emit_jump_insn (gen_eh_return_internal ());
3045 emit_barrier ();
3047 else if (interrupt_handler)
3048 emit_jump_insn (gen_mep_reti ());
3049 else
3050 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
3053 void
3054 mep_expand_eh_return (rtx *operands)
3056 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3058 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3059 emit_move_insn (ra, operands[0]);
3060 operands[0] = ra;
3063 emit_insn (gen_eh_epilogue (operands[0]));
3066 void
3067 mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
3069 cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
3070 mep_prevent_lp_restore = 1;
3071 mep_expand_epilogue ();
3072 mep_prevent_lp_restore = 0;
3075 void
3076 mep_expand_sibcall_epilogue (void)
3078 mep_sibcall_epilogue = 1;
3079 mep_expand_epilogue ();
3080 mep_sibcall_epilogue = 0;
3083 static bool
3084 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3086 if (decl == NULL)
3087 return false;
3089 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3090 return false;
3092 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3093 if (mep_interrupt_p () || mep_disinterrupt_p ())
3094 return false;
3096 return true;
3100 mep_return_stackadj_rtx (void)
3102 return gen_rtx_REG (SImode, 10);
3106 mep_return_handler_rtx (void)
3108 return gen_rtx_REG (SImode, LP_REGNO);
3111 void
3112 mep_function_profiler (FILE *file)
3114 /* Always right at the beginning of the function. */
3115 fprintf (file, "\t# mep function profiler\n");
3116 fprintf (file, "\tadd\t$sp, -8\n");
3117 fprintf (file, "\tsw\t$0, ($sp)\n");
3118 fprintf (file, "\tldc\t$0, $lp\n");
3119 fprintf (file, "\tsw\t$0, 4($sp)\n");
3120 fprintf (file, "\tbsr\t__mep_mcount\n");
3121 fprintf (file, "\tlw\t$0, 4($sp)\n");
3122 fprintf (file, "\tstc\t$0, $lp\n");
3123 fprintf (file, "\tlw\t$0, ($sp)\n");
3124 fprintf (file, "\tadd\t$sp, 8\n\n");
3127 const char *
3128 mep_emit_bb_trace_ret (void)
3130 fprintf (asm_out_file, "\t# end of block profiling\n");
3131 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3132 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3133 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3134 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3135 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3136 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3137 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3138 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3139 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3140 return "";
3143 #undef SAVE
3144 #undef RESTORE
3146 /* Operand Printing. */
3148 void
3149 mep_print_operand_address (FILE *stream, rtx address)
3151 if (GET_CODE (address) == MEM)
3152 address = XEXP (address, 0);
3153 else
3154 /* cf: gcc.dg/asm-4.c. */
3155 gcc_assert (GET_CODE (address) == REG);
3157 mep_print_operand (stream, address, 0);
3160 static struct
3162 char code;
3163 const char *pattern;
3164 const char *format;
3166 const conversions[] =
3168 { 0, "r", "0" },
3169 { 0, "m+ri", "3(2)" },
3170 { 0, "mr", "(1)" },
3171 { 0, "ms", "(1)" },
3172 { 0, "ml", "(1)" },
3173 { 0, "mLrs", "%lo(3)(2)" },
3174 { 0, "mLr+si", "%lo(4+5)(2)" },
3175 { 0, "m+ru2s", "%tpoff(5)(2)" },
3176 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3177 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3178 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3179 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3180 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3181 { 0, "mi", "(1)" },
3182 { 0, "m+si", "(2+3)" },
3183 { 0, "m+li", "(2+3)" },
3184 { 0, "i", "0" },
3185 { 0, "s", "0" },
3186 { 0, "+si", "1+2" },
3187 { 0, "+u2si", "%tpoff(3+4)" },
3188 { 0, "+u3si", "%sdaoff(3+4)" },
3189 { 0, "l", "0" },
3190 { 'b', "i", "0" },
3191 { 'B', "i", "0" },
3192 { 'U', "i", "0" },
3193 { 'h', "i", "0" },
3194 { 'h', "Hs", "%hi(1)" },
3195 { 'I', "i", "0" },
3196 { 'I', "u2s", "%tpoff(2)" },
3197 { 'I', "u3s", "%sdaoff(2)" },
3198 { 'I', "+u2si", "%tpoff(3+4)" },
3199 { 'I', "+u3si", "%sdaoff(3+4)" },
3200 { 'J', "i", "0" },
3201 { 'P', "mr", "(1\\+),\\0" },
3202 { 'x', "i", "0" },
3203 { 0, 0, 0 }
3206 static int
3207 unique_bit_in (HOST_WIDE_INT i)
3209 switch (i & 0xff)
3211 case 0x01: case 0xfe: return 0;
3212 case 0x02: case 0xfd: return 1;
3213 case 0x04: case 0xfb: return 2;
3214 case 0x08: case 0xf7: return 3;
3215 case 0x10: case 0x7f: return 4;
3216 case 0x20: case 0xbf: return 5;
3217 case 0x40: case 0xdf: return 6;
3218 case 0x80: case 0xef: return 7;
3219 default:
3220 gcc_unreachable ();
3224 static int
3225 bit_size_for_clip (HOST_WIDE_INT i)
3227 int rv;
3229 for (rv = 0; rv < 31; rv ++)
3230 if (((HOST_WIDE_INT) 1 << rv) > i)
3231 return rv + 1;
3232 gcc_unreachable ();
3235 /* Print an operand to a assembler instruction. */
3237 void
3238 mep_print_operand (FILE *file, rtx x, int code)
3240 int i, j;
3241 const char *real_name;
3243 if (code == '<')
3245 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3246 we're using, then skip over the "mep_" part of its name. */
3247 const struct cgen_insn *insn;
3249 if (mep_get_move_insn (mep_cmov, &insn))
3250 fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
3251 else
3252 mep_intrinsic_unavailable (mep_cmov);
3253 return;
3255 if (code == 'L')
3257 switch (GET_CODE (x))
3259 case AND:
3260 fputs ("clr", file);
3261 return;
3262 case IOR:
3263 fputs ("set", file);
3264 return;
3265 case XOR:
3266 fputs ("not", file);
3267 return;
3268 default:
3269 output_operand_lossage ("invalid %%L code");
3272 if (code == 'M')
3274 /* Print the second operand of a CR <- CR move. If we're using
3275 a two-operand instruction (i.e., a real cmov), then just print
3276 the operand normally. If we're using a "reg, reg, immediate"
3277 instruction such as caddi3, print the operand followed by a
3278 zero field. If we're using a three-register instruction,
3279 print the operand twice. */
3280 const struct cgen_insn *insn;
3282 mep_print_operand (file, x, 0);
3283 if (mep_get_move_insn (mep_cmov, &insn)
3284 && insn_data[insn->icode].n_operands == 3)
3286 fputs (", ", file);
3287 if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
3288 mep_print_operand (file, x, 0);
3289 else
3290 mep_print_operand (file, const0_rtx, 0);
3292 return;
3295 encode_pattern (x);
3296 for (i = 0; conversions[i].pattern; i++)
3297 if (conversions[i].code == code
3298 && strcmp(conversions[i].pattern, pattern) == 0)
3300 for (j = 0; conversions[i].format[j]; j++)
3301 if (conversions[i].format[j] == '\\')
3303 fputc (conversions[i].format[j+1], file);
3304 j++;
3306 else if (ISDIGIT(conversions[i].format[j]))
3308 rtx r = patternr[conversions[i].format[j] - '0'];
3309 switch (GET_CODE (r))
3311 case REG:
3312 fprintf (file, "%s", reg_names [REGNO (r)]);
3313 break;
3314 case CONST_INT:
3315 switch (code)
3317 case 'b':
3318 fprintf (file, "%d", unique_bit_in (INTVAL (r)));
3319 break;
3320 case 'B':
3321 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
3322 break;
3323 case 'h':
3324 fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
3325 break;
3326 case 'U':
3327 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
3328 break;
3329 case 'J':
3330 fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
3331 break;
3332 case 'x':
3333 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3334 && !(INTVAL (r) & 0xff))
3335 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
3336 else
3337 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3338 break;
3339 case 'I':
3340 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3341 && conversions[i].format[j+1] == 0)
3343 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
3344 fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
3346 else
3347 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3348 break;
3349 default:
3350 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3351 break;
3353 break;
3354 case CONST_DOUBLE:
3355 fprintf(file, "[const_double 0x%lx]",
3356 (unsigned long) CONST_DOUBLE_HIGH(r));
3357 break;
3358 case SYMBOL_REF:
3359 real_name = targetm.strip_name_encoding (XSTR (r, 0));
3360 assemble_name (file, real_name);
3361 break;
3362 case LABEL_REF:
3363 output_asm_label (r);
3364 break;
3365 default:
3366 fprintf (stderr, "don't know how to print this operand:");
3367 debug_rtx (r);
3368 gcc_unreachable ();
3371 else
3373 if (conversions[i].format[j] == '+'
3374 && (!code || code == 'I')
3375 && ISDIGIT (conversions[i].format[j+1])
3376 && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
3377 && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
3378 continue;
3379 fputc(conversions[i].format[j], file);
3381 break;
3383 if (!conversions[i].pattern)
3385 error ("unconvertible operand %c %qs", code?code:'-', pattern);
3386 debug_rtx(x);
3389 return;
3392 void
3393 mep_final_prescan_insn (rtx_insn *insn, rtx *operands ATTRIBUTE_UNUSED,
3394 int noperands ATTRIBUTE_UNUSED)
3396 /* Despite the fact that MeP is perfectly capable of branching and
3397 doing something else in the same bundle, gcc does jump
3398 optimization *after* scheduling, so we cannot trust the bundling
3399 flags on jump instructions. */
3400 if (GET_MODE (insn) == BImode
3401 && get_attr_slots (insn) != SLOTS_CORE)
3402 fputc ('+', asm_out_file);
3405 /* Function args in registers. */
3407 static void
3408 mep_setup_incoming_varargs (cumulative_args_t cum,
3409 machine_mode mode ATTRIBUTE_UNUSED,
3410 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3411 int second_time ATTRIBUTE_UNUSED)
3413 int nsave = 4 - (get_cumulative_args (cum)->nregs + 1);
3415 if (nsave > 0)
3416 cfun->machine->arg_regs_to_save = nsave;
3417 *pretend_size = nsave * 4;
3420 static int
3421 bytesize (const_tree type, machine_mode mode)
3423 if (mode == BLKmode)
3424 return int_size_in_bytes (type);
3425 return GET_MODE_SIZE (mode);
3428 static rtx
3429 mep_expand_builtin_saveregs (void)
3431 int bufsize, i, ns;
3432 rtx regbuf;
3434 ns = cfun->machine->arg_regs_to_save;
3435 if (TARGET_IVC2)
3437 bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
3438 regbuf = assign_stack_local (SImode, bufsize, 64);
3440 else
3442 bufsize = ns * 4;
3443 regbuf = assign_stack_local (SImode, bufsize, 32);
3446 move_block_from_reg (5-ns, regbuf, ns);
3448 if (TARGET_IVC2)
3450 rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
3451 int ofs = 8 * ((ns+1)/2);
3453 for (i=0; i<ns; i++)
3455 int rn = (4-ns) + i + 49;
3456 rtx ptr;
3458 ptr = offset_address (tmp, GEN_INT (ofs), 2);
3459 emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
3460 ofs += 8;
3463 return XEXP (regbuf, 0);
3466 static tree
3467 mep_build_builtin_va_list (void)
3469 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3470 tree record;
3473 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3475 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3476 get_identifier ("__va_next_gp"), ptr_type_node);
3477 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3478 get_identifier ("__va_next_gp_limit"),
3479 ptr_type_node);
3480 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3481 ptr_type_node);
3482 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3483 ptr_type_node);
3485 DECL_FIELD_CONTEXT (f_next_gp) = record;
3486 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3487 DECL_FIELD_CONTEXT (f_next_cop) = record;
3488 DECL_FIELD_CONTEXT (f_next_stack) = record;
3490 TYPE_FIELDS (record) = f_next_gp;
3491 DECL_CHAIN (f_next_gp) = f_next_gp_limit;
3492 DECL_CHAIN (f_next_gp_limit) = f_next_cop;
3493 DECL_CHAIN (f_next_cop) = f_next_stack;
3495 layout_type (record);
3497 return record;
3500 static void
3501 mep_expand_va_start (tree valist, rtx nextarg)
3503 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3504 tree next_gp, next_gp_limit, next_cop, next_stack;
3505 tree t, u;
3506 int ns;
3508 ns = cfun->machine->arg_regs_to_save;
3510 f_next_gp = TYPE_FIELDS (va_list_type_node);
3511 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3512 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3513 f_next_stack = DECL_CHAIN (f_next_cop);
3515 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3516 NULL_TREE);
3517 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3518 valist, f_next_gp_limit, NULL_TREE);
3519 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3520 NULL_TREE);
3521 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3522 valist, f_next_stack, NULL_TREE);
3524 /* va_list.next_gp = expand_builtin_saveregs (); */
3525 u = make_tree (sizetype, expand_builtin_saveregs ());
3526 u = fold_convert (ptr_type_node, u);
3527 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3528 TREE_SIDE_EFFECTS (t) = 1;
3529 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3531 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3532 u = fold_build_pointer_plus_hwi (u, 4 * ns);
3533 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3534 TREE_SIDE_EFFECTS (t) = 1;
3535 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3537 u = fold_build_pointer_plus_hwi (u, 8 * ((ns+1)/2));
3538 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3539 t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3540 TREE_SIDE_EFFECTS (t) = 1;
3541 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3543 /* va_list.next_stack = nextarg; */
3544 u = make_tree (ptr_type_node, nextarg);
3545 t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3546 TREE_SIDE_EFFECTS (t) = 1;
3547 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3550 static tree
3551 mep_gimplify_va_arg_expr (tree valist, tree type,
3552 gimple_seq *pre_p,
3553 gimple_seq *post_p ATTRIBUTE_UNUSED)
3555 HOST_WIDE_INT size, rsize;
3556 bool by_reference, ivc2_vec;
3557 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3558 tree next_gp, next_gp_limit, next_cop, next_stack;
3559 tree label_sover, label_selse;
3560 tree tmp, res_addr;
3562 ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);
3564 size = int_size_in_bytes (type);
3565 by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);
3567 if (by_reference)
3569 type = build_pointer_type (type);
3570 size = 4;
3572 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
3574 f_next_gp = TYPE_FIELDS (va_list_type_node);
3575 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3576 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3577 f_next_stack = DECL_CHAIN (f_next_cop);
3579 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3580 NULL_TREE);
3581 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3582 valist, f_next_gp_limit, NULL_TREE);
3583 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3584 NULL_TREE);
3585 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3586 valist, f_next_stack, NULL_TREE);
3588 /* if f_next_gp < f_next_gp_limit
3589 IF (VECTOR_P && IVC2)
3590 val = *f_next_cop;
3591 ELSE
3592 val = *f_next_gp;
3593 f_next_gp += 4;
3594 f_next_cop += 8;
3595 else
3596 label_selse:
3597 val = *f_next_stack;
3598 f_next_stack += rsize;
3599 label_sover:
3602 label_sover = create_artificial_label (UNKNOWN_LOCATION);
3603 label_selse = create_artificial_label (UNKNOWN_LOCATION);
3604 res_addr = create_tmp_var (ptr_type_node);
3606 tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
3607 unshare_expr (next_gp_limit));
3608 tmp = build3 (COND_EXPR, void_type_node, tmp,
3609 build1 (GOTO_EXPR, void_type_node,
3610 unshare_expr (label_selse)),
3611 NULL_TREE);
3612 gimplify_and_add (tmp, pre_p);
3614 if (ivc2_vec)
3616 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
3617 gimplify_and_add (tmp, pre_p);
3619 else
3621 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
3622 gimplify_and_add (tmp, pre_p);
3625 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_gp), 4);
3626 gimplify_assign (unshare_expr (next_gp), tmp, pre_p);
3628 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_cop), 8);
3629 gimplify_assign (unshare_expr (next_cop), tmp, pre_p);
3631 tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
3632 gimplify_and_add (tmp, pre_p);
3634 /* - - */
3636 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
3637 gimplify_and_add (tmp, pre_p);
3639 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
3640 gimplify_and_add (tmp, pre_p);
3642 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_stack), rsize);
3643 gimplify_assign (unshare_expr (next_stack), tmp, pre_p);
3645 /* - - */
3647 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
3648 gimplify_and_add (tmp, pre_p);
3650 res_addr = fold_convert (build_pointer_type (type), res_addr);
3652 if (by_reference)
3653 res_addr = build_va_arg_indirect_ref (res_addr);
3655 return build_va_arg_indirect_ref (res_addr);
3658 void
3659 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3660 rtx libname ATTRIBUTE_UNUSED,
3661 tree fndecl ATTRIBUTE_UNUSED)
3663 pcum->nregs = 0;
3665 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3666 pcum->vliw = 1;
3667 else
3668 pcum->vliw = 0;
3671 /* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3672 larger than 4 bytes are passed indirectly. Return value in 0,
3673 unless bigger than 4 bytes, then the caller passes a pointer as the
3674 first arg. For varargs, we copy $1..$4 to the stack. */
3676 static rtx
3677 mep_function_arg (cumulative_args_t cum_v, machine_mode mode,
3678 const_tree type ATTRIBUTE_UNUSED,
3679 bool named ATTRIBUTE_UNUSED)
3681 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3683 /* VOIDmode is a signal for the backend to pass data to the call
3684 expander via the second operand to the call pattern. We use
3685 this to determine whether to use "jsr" or "jsrv". */
3686 if (mode == VOIDmode)
3687 return GEN_INT (cum->vliw);
3689 /* If we havn't run out of argument registers, return the next. */
3690 if (cum->nregs < 4)
3692 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3693 return gen_rtx_REG (mode, cum->nregs + 49);
3694 else
3695 return gen_rtx_REG (mode, cum->nregs + 1);
3698 /* Otherwise the argument goes on the stack. */
3699 return NULL_RTX;
3702 static bool
3703 mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
3704 machine_mode mode,
3705 const_tree type,
3706 bool named ATTRIBUTE_UNUSED)
3708 int size = bytesize (type, mode);
3710 /* This is non-obvious, but yes, large values passed after we've run
3711 out of registers are *still* passed by reference - we put the
3712 address of the parameter on the stack, as well as putting the
3713 parameter itself elsewhere on the stack. */
3715 if (size <= 0 || size > 8)
3716 return true;
3717 if (size <= 4)
3718 return false;
3719 if (TARGET_IVC2 && get_cumulative_args (cum)->nregs < 4
3720 && type != NULL_TREE && VECTOR_TYPE_P (type))
3721 return false;
3722 return true;
3725 static void
3726 mep_function_arg_advance (cumulative_args_t pcum,
3727 machine_mode mode ATTRIBUTE_UNUSED,
3728 const_tree type ATTRIBUTE_UNUSED,
3729 bool named ATTRIBUTE_UNUSED)
3731 get_cumulative_args (pcum)->nregs += 1;
3734 bool
3735 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3737 int size = bytesize (type, BLKmode);
3738 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3739 return size > 0 && size <= 8 ? 0 : 1;
3740 return size > 0 && size <= 4 ? 0 : 1;
3743 static bool
3744 mep_narrow_volatile_bitfield (void)
3746 return true;
3747 return false;
3750 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3753 mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
3755 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3756 return gen_rtx_REG (TYPE_MODE (type), 48);
3757 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3760 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3763 mep_libcall_value (machine_mode mode)
3765 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3768 /* Handle pipeline hazards. */
3770 typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
3771 static const char *opnames[] = { "", "stc", "fsft", "ret" };
3773 static int prev_opcode = 0;
3775 /* This isn't as optimal as it could be, because we don't know what
3776 control register the STC opcode is storing in. We only need to add
3777 the nop if it's the relevant register, but we add it for irrelevant
3778 registers also. */
3780 void
3781 mep_asm_output_opcode (FILE *file, const char *ptr)
3783 int this_opcode = op_none;
3784 const char *hazard = 0;
3786 switch (*ptr)
3788 case 'f':
3789 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3790 this_opcode = op_fsft;
3791 break;
3792 case 'r':
3793 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3794 this_opcode = op_ret;
3795 break;
3796 case 's':
3797 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3798 this_opcode = op_stc;
3799 break;
3802 if (prev_opcode == op_stc && this_opcode == op_fsft)
3803 hazard = "nop";
3804 if (prev_opcode == op_stc && this_opcode == op_ret)
3805 hazard = "nop";
3807 if (hazard)
3808 fprintf(file, "%s\t# %s-%s hazard\n\t",
3809 hazard, opnames[prev_opcode], opnames[this_opcode]);
3811 prev_opcode = this_opcode;
3814 /* Handle attributes. */
3816 static tree
3817 mep_validate_type_based_tiny (tree *node, tree name, tree args,
3818 int flags ATTRIBUTE_UNUSED, bool *no_add)
3820 if (TREE_CODE (*node) != POINTER_TYPE)
3822 warning (0, "%qE attribute only applies to variables", name);
3823 *no_add = true;
3826 return NULL_TREE;
3829 static tree
3830 mep_validate_decl_based_tiny (tree *node, tree name, tree args,
3831 int flags ATTRIBUTE_UNUSED, bool *no_add)
3833 if (TREE_CODE (*node) != VAR_DECL
3834 && TREE_CODE (*node) != TYPE_DECL)
3836 warning (0, "%qE attribute only applies to variables", name);
3837 *no_add = true;
3839 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3841 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3843 warning (0, "address region attributes not allowed with auto storage class");
3844 *no_add = true;
3846 /* Ignore storage attribute of pointed to variable: char __far * x; */
3847 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3849 warning (0, "address region attributes on pointed-to types ignored");
3850 *no_add = true;
3854 return NULL_TREE;
3857 static int
3858 mep_multiple_address_regions (tree list, bool check_section_attr)
3860 tree a;
3861 int count_sections = 0;
3862 int section_attr_count = 0;
3864 for (a = list; a; a = TREE_CHAIN (a))
3866 if (is_attribute_p ("based", TREE_PURPOSE (a))
3867 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3868 || is_attribute_p ("near", TREE_PURPOSE (a))
3869 || is_attribute_p ("far", TREE_PURPOSE (a))
3870 || is_attribute_p ("io", TREE_PURPOSE (a)))
3871 count_sections ++;
3872 if (check_section_attr)
3873 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3876 if (check_section_attr)
3877 return section_attr_count;
3878 else
3879 return count_sections;
3882 #define MEP_ATTRIBUTES(decl) \
3883 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
3884 : DECL_ATTRIBUTES (decl) \
3885 ? (DECL_ATTRIBUTES (decl)) \
3886 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3888 static tree
3889 mep_validate_type_near_far (tree *node, tree name, tree args,
3890 int flags ATTRIBUTE_UNUSED, bool *no_add)
3892 if (TREE_CODE (*node) != METHOD_TYPE && TREE_CODE (*node) != POINTER_TYPE)
3894 warning (0, "%qE attribute only applies to variables and functions",
3895 name);
3896 *no_add = true;
3898 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
3900 warning (0, "duplicate address region attribute %qE", name);
3901 TYPE_ATTRIBUTES (*node) = NULL_TREE;
3903 return NULL_TREE;
3906 static tree
3907 mep_validate_decl_near_far (tree *node, tree name, tree args,
3908 int flags ATTRIBUTE_UNUSED, bool *no_add)
3910 if (TREE_CODE (*node) != VAR_DECL
3911 && TREE_CODE (*node) != FUNCTION_DECL
3912 && TREE_CODE (*node) != TYPE_DECL)
3914 warning (0, "%qE attribute only applies to variables and functions",
3915 name);
3916 *no_add = true;
3918 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3920 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3922 warning (0, "address region attributes not allowed with auto storage class");
3923 *no_add = true;
3925 /* Ignore storage attribute of pointed to variable: char __far * x; */
3926 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3928 warning (0, "address region attributes on pointed-to types ignored");
3929 *no_add = true;
3932 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
3934 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3935 name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
3936 DECL_ATTRIBUTES (*node) = NULL_TREE;
3938 return NULL_TREE;
3941 static tree
3942 mep_validate_type_disinterrupt (tree *node, tree name,
3943 tree args ATTRIBUTE_UNUSED,
3944 int flags ATTRIBUTE_UNUSED, bool *no_add)
3946 if (TREE_CODE (*node) != METHOD_TYPE)
3948 warning (0, "%qE attribute only applies to functions", name);
3949 *no_add = true;
3951 return NULL_TREE;
3954 static tree
3955 mep_validate_decl_disinterrupt (tree *node, tree name,
3956 tree args ATTRIBUTE_UNUSED,
3957 int flags ATTRIBUTE_UNUSED, bool *no_add)
3959 if (TREE_CODE (*node) != FUNCTION_DECL)
3961 warning (0, "%qE attribute only applies to functions", name);
3962 *no_add = true;
3964 return NULL_TREE;
3967 static tree
3968 mep_validate_type_warning (tree *node ATTRIBUTE_UNUSED, tree name,
3969 tree args ATTRIBUTE_UNUSED,
3970 int flags ATTRIBUTE_UNUSED, bool *no_add)
3972 warning (0, "%qE attribute only applies to functions", name);
3973 *no_add = true;
3974 return NULL_TREE;
3978 static tree
3979 mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3980 int flags ATTRIBUTE_UNUSED, bool *no_add)
3982 tree function_type;
3984 if (TREE_CODE (*node) != FUNCTION_DECL)
3986 warning (0, "%qE attribute only applies to functions", name);
3987 *no_add = true;
3988 return NULL_TREE;
3991 if (DECL_DECLARED_INLINE_P (*node))
3992 error ("cannot inline interrupt function %qE", DECL_NAME (*node));
3993 DECL_UNINLINABLE (*node) = 1;
3995 function_type = TREE_TYPE (*node);
3997 if (TREE_TYPE (function_type) != void_type_node)
3998 error ("interrupt function must have return type of void");
4000 if (prototype_p (function_type)
4001 && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
4002 || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
4003 error ("interrupt function must have no arguments");
4005 return NULL_TREE;
4008 static tree
4009 mep_validate_io_cb (tree *node, tree name, tree args,
4010 int flags ATTRIBUTE_UNUSED, bool *no_add)
4012 if (TREE_CODE (*node) != VAR_DECL)
4014 warning (0, "%qE attribute only applies to variables", name);
4015 *no_add = true;
4018 if (args != NULL_TREE)
4020 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
4021 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
4022 if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
4024 warning (0, "%qE attribute allows only an integer constant argument",
4025 name);
4026 *no_add = true;
4030 if (*no_add == false && !TARGET_IO_NO_VOLATILE)
4031 TREE_THIS_VOLATILE (*node) = 1;
4033 return NULL_TREE;
4036 static tree
4037 mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4038 int flags ATTRIBUTE_UNUSED, bool *no_add)
4040 if (TREE_CODE (*node) != FUNCTION_TYPE
4041 && TREE_CODE (*node) != FUNCTION_DECL
4042 && TREE_CODE (*node) != METHOD_TYPE
4043 && TREE_CODE (*node) != FIELD_DECL
4044 && TREE_CODE (*node) != TYPE_DECL)
4046 static int gave_pointer_note = 0;
4047 static int gave_array_note = 0;
4048 static const char * given_type = NULL;
4050 given_type = get_tree_code_name (TREE_CODE (*node));
4051 if (TREE_CODE (*node) == POINTER_TYPE)
4052 given_type = "pointers";
4053 if (TREE_CODE (*node) == ARRAY_TYPE)
4054 given_type = "arrays";
4056 if (given_type)
4057 warning (0, "%qE attribute only applies to functions, not %s",
4058 name, given_type);
4059 else
4060 warning (0, "%qE attribute only applies to functions",
4061 name);
4062 *no_add = true;
4064 if (TREE_CODE (*node) == POINTER_TYPE
4065 && !gave_pointer_note)
4067 inform (input_location,
4068 "to describe a pointer to a VLIW function, use syntax like this:\n%s",
4069 " typedef int (__vliw *vfuncptr) ();");
4070 gave_pointer_note = 1;
4073 if (TREE_CODE (*node) == ARRAY_TYPE
4074 && !gave_array_note)
4076 inform (input_location,
4077 "to describe an array of VLIW function pointers, use syntax like this:\n%s",
4078 " typedef int (__vliw *vfuncptr[]) ();");
4079 gave_array_note = 1;
4082 if (!TARGET_VLIW)
4083 error ("VLIW functions are not allowed without a VLIW configuration");
4084 return NULL_TREE;
4087 static const struct attribute_spec mep_attribute_table[11] =
4089 /* name min max decl type func handler
4090 affects_type_identity */
4091 { "based", 0, 0, false, false, false, mep_validate_decl_based_tiny,
4092 mep_validate_type_based_tiny, false },
4093 { "tiny", 0, 0, false, false, false, mep_validate_decl_based_tiny,
4094 mep_validate_type_based_tiny, false },
4095 { "near", 0, 0, false, false, false, mep_validate_decl_near_far,
4096 mep_validate_type_near_far, false },
4097 { "far", 0, 0, false, false, false, mep_validate_decl_near_far,
4098 mep_validate_type_near_far, false },
4099 { "disinterrupt", 0, 0, false, false, false, mep_validate_decl_disinterrupt,
4100 mep_validate_type_disinterrupt, false },
4101 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt,
4102 mep_validate_type_warning, false },
4103 { "io", 0, 1, false, false, false, mep_validate_io_cb,
4104 mep_validate_type_warning, false },
4105 { "cb", 0, 1, false, false, false, mep_validate_io_cb,
4106 mep_validate_type_warning, false },
4107 { "vliw", 0, 0, false, true, false, NULL, mep_validate_vliw, false },
4108 { NULL, 0, 0, false, false, false, NULL, NULL, false }
4111 static bool
4112 mep_function_attribute_inlinable_p (const_tree callee)
4114 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4115 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4116 return (lookup_attribute ("disinterrupt", attrs) == 0
4117 && lookup_attribute ("interrupt", attrs) == 0);
4120 static bool
4121 mep_can_inline_p (tree caller, tree callee)
4123 if (TREE_CODE (callee) == ADDR_EXPR)
4124 callee = TREE_OPERAND (callee, 0);
4126 if (!mep_vliw_function_p (caller)
4127 && mep_vliw_function_p (callee))
4129 return false;
4131 return true;
4134 #define FUNC_CALL 1
4135 #define FUNC_DISINTERRUPT 2
4138 struct GTY(()) pragma_entry {
4139 int used;
4140 int flag;
4143 struct pragma_traits : default_hashmap_traits
4145 static hashval_t hash (const char *s) { return htab_hash_string (s); }
4146 static bool
4147 equal_keys (const char *a, const char *b)
4149 return strcmp (a, b) == 0;
4153 /* Hash table of farcall-tagged sections. */
4154 static GTY(()) hash_map<const char *, pragma_entry, pragma_traits> *
4155 pragma_htab;
4157 static void
4158 mep_note_pragma_flag (const char *funcname, int flag)
4160 if (!pragma_htab)
4161 pragma_htab
4162 = hash_map<const char *, pragma_entry, pragma_traits>::create_ggc (31);
4164 bool existed;
4165 const char *name = ggc_strdup (funcname);
4166 pragma_entry *slot = &pragma_htab->get_or_insert (name, &existed);
4167 if (!existed)
4169 slot->flag = 0;
4170 slot->used = 0;
4172 slot->flag |= flag;
4175 static bool
4176 mep_lookup_pragma_flag (const char *funcname, int flag)
4178 if (!pragma_htab)
4179 return false;
4181 if (funcname[0] == '@' && funcname[2] == '.')
4182 funcname += 3;
4184 pragma_entry *slot = pragma_htab->get (funcname);
4185 if (slot && (slot->flag & flag))
4187 slot->used |= flag;
4188 return true;
4190 return false;
4193 bool
4194 mep_lookup_pragma_call (const char *funcname)
4196 return mep_lookup_pragma_flag (funcname, FUNC_CALL);
4199 void
4200 mep_note_pragma_call (const char *funcname)
4202 mep_note_pragma_flag (funcname, FUNC_CALL);
4205 bool
4206 mep_lookup_pragma_disinterrupt (const char *funcname)
4208 return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
4211 void
4212 mep_note_pragma_disinterrupt (const char *funcname)
4214 mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
4217 bool
4218 note_unused_pragma_disinterrupt (const char *const &s, const pragma_entry &e,
4219 void *)
4221 if ((e.flag & FUNC_DISINTERRUPT)
4222 && !(e.used & FUNC_DISINTERRUPT))
4223 warning (0, "\"#pragma disinterrupt %s\" not used", s);
4224 return 1;
4227 void
4228 mep_file_cleanups (void)
4230 if (pragma_htab)
4231 pragma_htab->traverse<void *, note_unused_pragma_disinterrupt> (NULL);
4234 /* These three functions provide a bridge between the pramgas that
4235 affect register classes, and the functions that maintain them. We
4236 can't call those functions directly as pragma handling is part of
4237 the front end and doesn't have direct access to them. */
4239 void
4240 mep_save_register_info (void)
4242 save_register_info ();
4245 void
4246 mep_reinit_regs (void)
4248 reinit_regs ();
4251 void
4252 mep_init_regs (void)
4254 init_regs ();
4259 static int
4260 mep_attrlist_to_encoding (tree list, tree decl)
4262 if (mep_multiple_address_regions (list, false) > 1)
4264 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4265 TREE_PURPOSE (TREE_CHAIN (list)),
4266 DECL_NAME (decl),
4267 DECL_SOURCE_LINE (decl));
4268 TREE_CHAIN (list) = NULL_TREE;
4271 while (list)
4273 if (is_attribute_p ("based", TREE_PURPOSE (list)))
4274 return 'b';
4275 if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
4276 return 't';
4277 if (is_attribute_p ("near", TREE_PURPOSE (list)))
4278 return 'n';
4279 if (is_attribute_p ("far", TREE_PURPOSE (list)))
4280 return 'f';
4281 if (is_attribute_p ("io", TREE_PURPOSE (list)))
4283 if (TREE_VALUE (list)
4284 && TREE_VALUE (TREE_VALUE (list))
4285 && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
4287 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
4288 if (location >= 0
4289 && location <= 0x1000000)
4290 return 'i';
4292 return 'I';
4294 if (is_attribute_p ("cb", TREE_PURPOSE (list)))
4295 return 'c';
4296 list = TREE_CHAIN (list);
4298 if (TARGET_TF
4299 && TREE_CODE (decl) == FUNCTION_DECL
4300 && DECL_SECTION_NAME (decl) == 0)
4301 return 'f';
4302 return 0;
4305 static int
4306 mep_comp_type_attributes (const_tree t1, const_tree t2)
4308 int vliw1, vliw2;
4310 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4311 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4313 if (vliw1 != vliw2)
4314 return 0;
4316 return 1;
4319 static void
4320 mep_insert_attributes (tree decl, tree *attributes)
4322 int size;
4323 const char *secname = 0;
4324 tree attrib, attrlist;
4325 char encoding;
4327 if (TREE_CODE (decl) == FUNCTION_DECL)
4329 const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));
4331 if (mep_lookup_pragma_disinterrupt (funcname))
4333 attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
4334 *attributes = chainon (*attributes, attrib);
4338 if (TREE_CODE (decl) != VAR_DECL
4339 || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
4340 return;
4342 if (TREE_READONLY (decl) && TARGET_DC)
4343 /* -mdc means that const variables default to the near section,
4344 regardless of the size cutoff. */
4345 return;
4347 /* User specified an attribute, so override the default.
4348 Ignore storage attribute of pointed to variable. char __far * x; */
4349 if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
4351 if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
4352 TYPE_ATTRIBUTES (decl) = NULL_TREE;
4353 else if (DECL_ATTRIBUTES (decl) && *attributes)
4354 DECL_ATTRIBUTES (decl) = NULL_TREE;
4357 attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
4358 encoding = mep_attrlist_to_encoding (attrlist, decl);
4359 if (!encoding && TYPE_P (TREE_TYPE (decl)))
4361 attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
4362 encoding = mep_attrlist_to_encoding (attrlist, decl);
4364 if (encoding)
4366 /* This means that the declaration has a specific section
4367 attribute, so we should not apply the default rules. */
4369 if (encoding == 'i' || encoding == 'I')
4371 tree attr = lookup_attribute ("io", attrlist);
4372 if (attr
4373 && TREE_VALUE (attr)
4374 && TREE_VALUE (TREE_VALUE(attr)))
4376 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4377 static tree previous_value = 0;
4378 static int previous_location = 0;
4379 static tree previous_name = 0;
4381 /* We take advantage of the fact that gcc will reuse the
4382 same tree pointer when applying an attribute to a
4383 list of decls, but produce a new tree for attributes
4384 on separate source lines, even when they're textually
4385 identical. This is the behavior we want. */
4386 if (TREE_VALUE (attr) == previous_value
4387 && location == previous_location)
4389 warning(0, "__io address 0x%x is the same for %qE and %qE",
4390 location, previous_name, DECL_NAME (decl));
4392 previous_name = DECL_NAME (decl);
4393 previous_location = location;
4394 previous_value = TREE_VALUE (attr);
4397 return;
4401 /* Declarations of arrays can change size. Don't trust them. */
4402 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
4403 size = 0;
4404 else
4405 size = int_size_in_bytes (TREE_TYPE (decl));
4407 if (TARGET_RAND_TPGP && size <= 4 && size > 0)
4409 if (TREE_PUBLIC (decl)
4410 || DECL_EXTERNAL (decl)
4411 || TREE_STATIC (decl))
4413 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
4414 int key = 0;
4416 while (*name)
4417 key += *name++;
4419 switch (key & 3)
4421 case 0:
4422 secname = "based";
4423 break;
4424 case 1:
4425 secname = "tiny";
4426 break;
4427 case 2:
4428 secname = "far";
4429 break;
4430 default:
4435 else
4437 if (size <= mep_based_cutoff && size > 0)
4438 secname = "based";
4439 else if (size <= mep_tiny_cutoff && size > 0)
4440 secname = "tiny";
4441 else if (TARGET_L)
4442 secname = "far";
4445 if (mep_const_section && TREE_READONLY (decl))
4447 if (strcmp (mep_const_section, "tiny") == 0)
4448 secname = "tiny";
4449 else if (strcmp (mep_const_section, "near") == 0)
4450 return;
4451 else if (strcmp (mep_const_section, "far") == 0)
4452 secname = "far";
4455 if (!secname)
4456 return;
4458 if (!mep_multiple_address_regions (*attributes, true)
4459 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
4461 attrib = build_tree_list (get_identifier (secname), NULL_TREE);
4463 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4464 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4465 and mep_validate_based_tiny. */
4466 DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
4470 static void
4471 mep_encode_section_info (tree decl, rtx rtl, int first)
4473 rtx rtlname;
4474 const char *oldname;
4475 const char *secname;
4476 char encoding;
4477 char *newname;
4478 tree idp;
4479 int maxsize;
4480 tree type;
4481 tree mep_attributes;
4483 if (! first)
4484 return;
4486 if (TREE_CODE (decl) != VAR_DECL
4487 && TREE_CODE (decl) != FUNCTION_DECL)
4488 return;
4490 rtlname = XEXP (rtl, 0);
4491 if (GET_CODE (rtlname) == SYMBOL_REF)
4492 oldname = XSTR (rtlname, 0);
4493 else if (GET_CODE (rtlname) == MEM
4494 && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
4495 oldname = XSTR (XEXP (rtlname, 0), 0);
4496 else
4497 gcc_unreachable ();
4499 type = TREE_TYPE (decl);
4500 if (type == error_mark_node)
4501 return;
4502 mep_attributes = MEP_ATTRIBUTES (decl);
4504 encoding = mep_attrlist_to_encoding (mep_attributes, decl);
4506 if (encoding)
4508 newname = (char *) alloca (strlen (oldname) + 4);
4509 sprintf (newname, "@%c.%s", encoding, oldname);
4510 idp = get_identifier (newname);
4511 XEXP (rtl, 0) =
4512 gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
4513 SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
4514 SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);
4516 switch (encoding)
4518 case 'b':
4519 maxsize = 128;
4520 secname = "based";
4521 break;
4522 case 't':
4523 maxsize = 65536;
4524 secname = "tiny";
4525 break;
4526 case 'n':
4527 maxsize = 0x1000000;
4528 secname = "near";
4529 break;
4530 default:
4531 maxsize = 0;
4532 secname = 0;
4533 break;
4535 if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
4537 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4538 oldname,
4539 (long) int_size_in_bytes (TREE_TYPE (decl)),
4540 secname,
4541 maxsize);
4546 const char *
4547 mep_strip_name_encoding (const char *sym)
4549 while (1)
4551 if (*sym == '*')
4552 sym++;
4553 else if (*sym == '@' && sym[2] == '.')
4554 sym += 3;
4555 else
4556 return sym;
4560 static section *
4561 mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
4562 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
4564 int readonly = 1;
4565 int encoding;
4567 switch (TREE_CODE (decl))
4569 case VAR_DECL:
4570 if (!TREE_READONLY (decl)
4571 || TREE_SIDE_EFFECTS (decl)
4572 || !DECL_INITIAL (decl)
4573 || (DECL_INITIAL (decl) != error_mark_node
4574 && !TREE_CONSTANT (DECL_INITIAL (decl))))
4575 readonly = 0;
4576 break;
4577 case CONSTRUCTOR:
4578 if (! TREE_CONSTANT (decl))
4579 readonly = 0;
4580 break;
4582 default:
4583 break;
4586 if (TREE_CODE (decl) == FUNCTION_DECL)
4588 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4590 if (name[0] == '@' && name[2] == '.')
4591 encoding = name[1];
4592 else
4593 encoding = 0;
4595 if (flag_function_sections || DECL_COMDAT_GROUP (decl))
4596 mep_unique_section (decl, 0);
4597 else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4599 if (encoding == 'f')
4600 return vftext_section;
4601 else
4602 return vtext_section;
4604 else if (encoding == 'f')
4605 return ftext_section;
4606 else
4607 return text_section;
4610 if (TREE_CODE (decl) == VAR_DECL)
4612 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4614 if (name[0] == '@' && name[2] == '.')
4615 switch (name[1])
4617 case 'b':
4618 return based_section;
4620 case 't':
4621 if (readonly)
4622 return srodata_section;
4623 if (DECL_INITIAL (decl))
4624 return sdata_section;
4625 return tinybss_section;
4627 case 'f':
4628 if (readonly)
4629 return frodata_section;
4630 return far_section;
4632 case 'i':
4633 case 'I':
4634 error_at (DECL_SOURCE_LOCATION (decl),
4635 "variable %D of type %<io%> must be uninitialized", decl);
4636 return data_section;
4638 case 'c':
4639 error_at (DECL_SOURCE_LOCATION (decl),
4640 "variable %D of type %<cb%> must be uninitialized", decl);
4641 return data_section;
4645 if (readonly)
4646 return readonly_data_section;
4648 return data_section;
4651 static void
4652 mep_unique_section (tree decl, int reloc)
4654 static const char *prefixes[][2] =
4656 { ".text.", ".gnu.linkonce.t." },
4657 { ".rodata.", ".gnu.linkonce.r." },
4658 { ".data.", ".gnu.linkonce.d." },
4659 { ".based.", ".gnu.linkonce.based." },
4660 { ".sdata.", ".gnu.linkonce.s." },
4661 { ".far.", ".gnu.linkonce.far." },
4662 { ".ftext.", ".gnu.linkonce.ft." },
4663 { ".frodata.", ".gnu.linkonce.frd." },
4664 { ".srodata.", ".gnu.linkonce.srd." },
4665 { ".vtext.", ".gnu.linkonce.v." },
4666 { ".vftext.", ".gnu.linkonce.vf." }
4668 int sec = 2; /* .data */
4669 int len;
4670 const char *name, *prefix;
4671 char *string;
4673 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
4674 if (DECL_RTL (decl))
4675 name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4677 if (TREE_CODE (decl) == FUNCTION_DECL)
4679 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4680 sec = 9; /* .vtext */
4681 else
4682 sec = 0; /* .text */
4684 else if (decl_readonly_section (decl, reloc))
4685 sec = 1; /* .rodata */
4687 if (name[0] == '@' && name[2] == '.')
4689 switch (name[1])
4691 case 'b':
4692 sec = 3; /* .based */
4693 break;
4694 case 't':
4695 if (sec == 1)
4696 sec = 8; /* .srodata */
4697 else
4698 sec = 4; /* .sdata */
4699 break;
4700 case 'f':
4701 if (sec == 0)
4702 sec = 6; /* .ftext */
4703 else if (sec == 9)
4704 sec = 10; /* .vftext */
4705 else if (sec == 1)
4706 sec = 7; /* .frodata */
4707 else
4708 sec = 5; /* .far. */
4709 break;
4711 name += 3;
4714 prefix = prefixes[sec][DECL_COMDAT_GROUP(decl) != NULL];
4715 len = strlen (name) + strlen (prefix);
4716 string = (char *) alloca (len + 1);
4718 sprintf (string, "%s%s", prefix, name);
4720 set_decl_section_name (decl, string);
4723 /* Given a decl, a section name, and whether the decl initializer
4724 has relocs, choose attributes for the section. */
4726 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4728 static unsigned int
4729 mep_section_type_flags (tree decl, const char *name, int reloc)
4731 unsigned int flags = default_section_type_flags (decl, name, reloc);
4733 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4734 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4735 flags |= SECTION_MEP_VLIW;
4737 return flags;
4740 /* Switch to an arbitrary section NAME with attributes as specified
4741 by FLAGS. ALIGN specifies any known alignment requirements for
4742 the section; 0 if the default should be used.
4744 Differs from the standard ELF version only in support of VLIW mode. */
4746 static void
4747 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4749 char flagchars[8], *f = flagchars;
4750 const char *type;
4752 if (!(flags & SECTION_DEBUG))
4753 *f++ = 'a';
4754 if (flags & SECTION_WRITE)
4755 *f++ = 'w';
4756 if (flags & SECTION_CODE)
4757 *f++ = 'x';
4758 if (flags & SECTION_SMALL)
4759 *f++ = 's';
4760 if (flags & SECTION_MEP_VLIW)
4761 *f++ = 'v';
4762 *f = '\0';
4764 if (flags & SECTION_BSS)
4765 type = "nobits";
4766 else
4767 type = "progbits";
4769 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4770 name, flagchars, type);
4772 if (flags & SECTION_CODE)
4773 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4774 asm_out_file);
4777 void
4778 mep_output_aligned_common (FILE *stream, tree decl, const char *name,
4779 int size, int align, int global)
4781 /* We intentionally don't use mep_section_tag() here. */
4782 if (name[0] == '@'
4783 && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
4784 && name[2] == '.')
4786 int location = -1;
4787 tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
4788 DECL_ATTRIBUTES (decl));
4789 if (attr
4790 && TREE_VALUE (attr)
4791 && TREE_VALUE (TREE_VALUE(attr)))
4792 location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4793 if (location == -1)
4794 return;
4795 if (global)
4797 fprintf (stream, "\t.globl\t");
4798 assemble_name (stream, name);
4799 fprintf (stream, "\n");
4801 assemble_name (stream, name);
4802 fprintf (stream, " = %d\n", location);
4803 return;
4805 if (name[0] == '@' && name[2] == '.')
4807 const char *sec = 0;
4808 switch (name[1])
4810 case 'b':
4811 switch_to_section (based_section);
4812 sec = ".based";
4813 break;
4814 case 't':
4815 switch_to_section (tinybss_section);
4816 sec = ".sbss";
4817 break;
4818 case 'f':
4819 switch_to_section (farbss_section);
4820 sec = ".farbss";
4821 break;
4823 if (sec)
4825 const char *name2;
4826 int p2align = 0;
4828 while (align > BITS_PER_UNIT)
4830 align /= 2;
4831 p2align ++;
4833 name2 = targetm.strip_name_encoding (name);
4834 if (global)
4835 fprintf (stream, "\t.globl\t%s\n", name2);
4836 fprintf (stream, "\t.p2align %d\n", p2align);
4837 fprintf (stream, "\t.type\t%s,@object\n", name2);
4838 fprintf (stream, "\t.size\t%s,%d\n", name2, size);
4839 fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
4840 return;
4844 if (!global)
4846 fprintf (stream, "\t.local\t");
4847 assemble_name (stream, name);
4848 fprintf (stream, "\n");
4850 fprintf (stream, "\t.comm\t");
4851 assemble_name (stream, name);
4852 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
4855 /* Trampolines. */
4857 static void
4858 mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
4860 rtx addr = XEXP (m_tramp, 0);
4861 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
4863 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4864 LCT_NORMAL, VOIDmode, 3,
4865 addr, Pmode,
4866 fnaddr, Pmode,
4867 static_chain, Pmode);
4870 /* Experimental Reorg. */
4872 static bool
4873 mep_mentioned_p (rtx in,
4874 rtx reg, /* NULL for mem */
4875 int modes_too) /* if nonzero, modes must match also. */
4877 const char *fmt;
4878 int i;
4879 enum rtx_code code;
4881 if (in == 0)
4882 return false;
4883 if (reg && GET_CODE (reg) != REG)
4884 return false;
4886 if (GET_CODE (in) == LABEL_REF)
4887 return (reg == 0);
4889 code = GET_CODE (in);
4891 switch (code)
4893 case MEM:
4894 if (reg)
4895 return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
4896 return true;
4898 case REG:
4899 if (!reg)
4900 return false;
4901 if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
4902 return false;
4903 return (REGNO (in) == REGNO (reg));
4905 case SCRATCH:
4906 case CC0:
4907 case PC:
4908 case CONST_INT:
4909 case CONST_DOUBLE:
4910 return false;
4912 default:
4913 break;
4916 /* Set's source should be read-only. */
4917 if (code == SET && !reg)
4918 return mep_mentioned_p (SET_DEST (in), reg, modes_too);
4920 fmt = GET_RTX_FORMAT (code);
4922 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4924 if (fmt[i] == 'E')
4926 register int j;
4927 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
4928 if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
4929 return true;
4931 else if (fmt[i] == 'e'
4932 && mep_mentioned_p (XEXP (in, i), reg, modes_too))
4933 return true;
4935 return false;
4938 #define EXPERIMENTAL_REGMOVE_REORG 1
4940 #if EXPERIMENTAL_REGMOVE_REORG
4942 static int
4943 mep_compatible_reg_class (int r1, int r2)
4945 if (GR_REGNO_P (r1) && GR_REGNO_P (r2))
4946 return 1;
4947 if (CR_REGNO_P (r1) && CR_REGNO_P (r2))
4948 return 1;
4949 return 0;
4952 static void
4953 mep_reorg_regmove (rtx_insn *insns)
4955 rtx_insn *insn, *next, *follow;
4956 rtx pat, *where;
4957 int count = 0, done = 0, replace, before = 0;
4959 if (dump_file)
4960 for (insn = insns; insn; insn = NEXT_INSN (insn))
4961 if (NONJUMP_INSN_P (insn))
4962 before++;
4964 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4965 set that uses the r2 and r2 dies there. We replace r2 with r1
4966 and see if it's still a valid insn. If so, delete the first set.
4967 Copied from reorg.c. */
4969 while (!done)
4971 done = 1;
4972 for (insn = insns; insn; insn = next)
4974 next = next_nonnote_nondebug_insn (insn);
4975 if (! NONJUMP_INSN_P (insn))
4976 continue;
4977 pat = PATTERN (insn);
4979 replace = 0;
4981 if (GET_CODE (pat) == SET
4982 && GET_CODE (SET_SRC (pat)) == REG
4983 && GET_CODE (SET_DEST (pat)) == REG
4984 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4985 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4987 follow = next_nonnote_nondebug_insn (insn);
4988 if (dump_file)
4989 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
4991 while (follow && NONJUMP_INSN_P (follow)
4992 && GET_CODE (PATTERN (follow)) == SET
4993 && !dead_or_set_p (follow, SET_SRC (pat))
4994 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
4995 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
4997 if (dump_file)
4998 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
4999 follow = next_nonnote_insn (follow);
5002 if (dump_file)
5003 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
5004 if (follow && NONJUMP_INSN_P (follow)
5005 && GET_CODE (PATTERN (follow)) == SET
5006 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
5008 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
5010 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
5012 replace = 1;
5013 where = & SET_SRC (PATTERN (follow));
5016 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
5018 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
5020 replace = 1;
5021 where = & PATTERN (follow);
5027 /* If so, follow is the corresponding insn */
5028 if (replace)
5030 if (dump_file)
5032 rtx_insn *x;
5034 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
5035 for (x = insn; x ;x = NEXT_INSN (x))
5037 print_rtl_single (dump_file, x);
5038 if (x == follow)
5039 break;
5040 fprintf (dump_file, "\n");
5044 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
5045 follow, where))
5047 count ++;
5048 delete_insn (insn);
5049 if (dump_file)
5051 fprintf (dump_file, "\n----- Success! new insn:\n\n");
5052 print_rtl_single (dump_file, follow);
5054 done = 0;
5060 if (dump_file)
5062 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5063 fprintf (dump_file, "=====\n");
5066 #endif
5069 /* Figure out where to put LABEL, which is the label for a repeat loop.
5070 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5071 the loop ends just before LAST_INSN. If SHARED, insns other than the
5072 "repeat" might use LABEL to jump to the loop's continuation point.
5074 Return the last instruction in the adjusted loop. */
5076 static rtx_insn *
5077 mep_insert_repeat_label_last (rtx_insn *last_insn, rtx_code_label *label,
5078 bool including, bool shared)
5080 rtx_insn *next, *prev;
5081 int count = 0, code, icode;
5083 if (dump_file)
5084 fprintf (dump_file, "considering end of repeat loop at insn %d\n",
5085 INSN_UID (last_insn));
5087 /* Set PREV to the last insn in the loop. */
5088 prev = last_insn;
5089 if (!including)
5090 prev = PREV_INSN (prev);
5092 /* Set NEXT to the next insn after the repeat label. */
5093 next = last_insn;
5094 if (!shared)
5095 while (prev != 0)
5097 code = GET_CODE (prev);
5098 if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
5099 break;
5101 if (INSN_P (prev))
5103 if (GET_CODE (PATTERN (prev)) == SEQUENCE)
5104 prev = as_a <rtx_insn *> (XVECEXP (PATTERN (prev), 0, 1));
5106 /* Other insns that should not be in the last two opcodes. */
5107 icode = recog_memoized (prev);
5108 if (icode < 0
5109 || icode == CODE_FOR_repeat
5110 || icode == CODE_FOR_erepeat
5111 || get_attr_may_trap (prev) == MAY_TRAP_YES)
5112 break;
5114 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5115 is the second instruction in a VLIW bundle. In that case,
5116 loop again: if the first instruction also satisfies the
5117 conditions above then we will reach here again and put
5118 both of them into the repeat epilogue. Otherwise both
5119 should remain outside. */
5120 if (GET_MODE (prev) != BImode)
5122 count++;
5123 next = prev;
5124 if (dump_file)
5125 print_rtl_single (dump_file, next);
5126 if (count == 2)
5127 break;
5130 prev = PREV_INSN (prev);
5133 /* See if we're adding the label immediately after the repeat insn.
5134 If so, we need to separate them with a nop. */
5135 prev = prev_real_insn (next);
5136 if (prev)
5137 switch (recog_memoized (prev))
5139 case CODE_FOR_repeat:
5140 case CODE_FOR_erepeat:
5141 if (dump_file)
5142 fprintf (dump_file, "Adding nop inside loop\n");
5143 emit_insn_before (gen_nop (), next);
5144 break;
5146 default:
5147 break;
5150 /* Insert the label. */
5151 emit_label_before (label, next);
5153 /* Insert the nops. */
5154 if (dump_file && count < 2)
5155 fprintf (dump_file, "Adding %d nop%s\n\n",
5156 2 - count, count == 1 ? "" : "s");
5158 for (; count < 2; count++)
5159 if (including)
5160 last_insn = emit_insn_after (gen_nop (), last_insn);
5161 else
5162 emit_insn_before (gen_nop (), last_insn);
5164 return last_insn;
5168 void
5169 mep_emit_doloop (rtx *operands, int is_end)
5171 rtx tag;
5173 if (cfun->machine->doloop_tags == 0
5174 || cfun->machine->doloop_tag_from_end == is_end)
5176 cfun->machine->doloop_tags++;
5177 cfun->machine->doloop_tag_from_end = is_end;
5180 tag = GEN_INT (cfun->machine->doloop_tags - 1);
5181 if (is_end)
5182 emit_jump_insn (gen_doloop_end_internal (operands[0], operands[1], tag));
5183 else
5184 emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
5188 /* Code for converting doloop_begins and doloop_ends into valid
5189 MeP instructions. A doloop_begin is just a placeholder:
5191 $count = unspec ($count)
5193 where $count is initially the number of iterations - 1.
5194 doloop_end has the form:
5196 if ($count-- == 0) goto label
5198 The counter variable is private to the doloop insns, nothing else
5199 relies on its value.
5201 There are three cases, in decreasing order of preference:
5203 1. A loop has exactly one doloop_begin and one doloop_end.
5204 The doloop_end branches to the first instruction after
5205 the doloop_begin.
5207 In this case we can replace the doloop_begin with a repeat
5208 instruction and remove the doloop_end. I.e.:
5210 $count1 = unspec ($count1)
5211 label:
5213 insn1
5214 insn2
5215 if ($count2-- == 0) goto label
5217 becomes:
5219 repeat $count1,repeat_label
5220 label:
5222 repeat_label:
5223 insn1
5224 insn2
5225 # end repeat
5227 2. As for (1), except there are several doloop_ends. One of them
5228 (call it X) falls through to a label L. All the others fall
5229 through to branches to L.
5231 In this case, we remove X and replace the other doloop_ends
5232 with branches to the repeat label. For example:
5234 $count1 = unspec ($count1)
5235 start:
5237 if ($count2-- == 0) goto label
5238 end:
5240 if ($count3-- == 0) goto label
5241 goto end
5243 becomes:
5245 repeat $count1,repeat_label
5246 start:
5248 repeat_label:
5251 # end repeat
5252 end:
5254 goto repeat_label
5256 3. The fallback case. Replace doloop_begins with:
5258 $count = $count + 1
5260 Replace doloop_ends with the equivalent of:
5262 $count = $count - 1
5263 if ($count == 0) goto label
5265 Note that this might need a scratch register if $count
5266 is stored in memory. */
5268 /* A structure describing one doloop_begin. */
5269 struct mep_doloop_begin {
5270 /* The next doloop_begin with the same tag. */
5271 struct mep_doloop_begin *next;
5273 /* The instruction itself. */
5274 rtx_insn *insn;
5276 /* The initial counter value. This is known to be a general register. */
5277 rtx counter;
5280 /* A structure describing a doloop_end. */
5281 struct mep_doloop_end {
5282 /* The next doloop_end with the same loop tag. */
5283 struct mep_doloop_end *next;
5285 /* The instruction itself. */
5286 rtx_insn *insn;
5288 /* The first instruction after INSN when the branch isn't taken. */
5289 rtx_insn *fallthrough;
5291 /* The location of the counter value. Since doloop_end_internal is a
5292 jump instruction, it has to allow the counter to be stored anywhere
5293 (any non-fixed register or memory location). */
5294 rtx counter;
5296 /* The target label (the place where the insn branches when the counter
5297 isn't zero). */
5298 rtx label;
5300 /* A scratch register. Only available when COUNTER isn't stored
5301 in a general register. */
5302 rtx scratch;
5306 /* One do-while loop. */
5307 struct mep_doloop {
5308 /* All the doloop_begins for this loop (in no particular order). */
5309 struct mep_doloop_begin *begin;
5311 /* All the doloop_ends. When there is more than one, arrange things
5312 so that the first one is the most likely to be X in case (2) above. */
5313 struct mep_doloop_end *end;
5317 /* Return true if LOOP can be converted into repeat/repeat_end form
5318 (that is, if it matches cases (1) or (2) above). */
5320 static bool
5321 mep_repeat_loop_p (struct mep_doloop *loop)
5323 struct mep_doloop_end *end;
5324 rtx fallthrough;
5326 /* There must be exactly one doloop_begin and at least one doloop_end. */
5327 if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
5328 return false;
5330 /* The first doloop_end (X) must branch back to the insn after
5331 the doloop_begin. */
5332 if (prev_real_insn (loop->end->label) != loop->begin->insn)
5333 return false;
5335 /* All the other doloop_ends must branch to the same place as X.
5336 When the branch isn't taken, they must jump to the instruction
5337 after X. */
5338 fallthrough = loop->end->fallthrough;
5339 for (end = loop->end->next; end != 0; end = end->next)
5340 if (end->label != loop->end->label
5341 || !simplejump_p (end->fallthrough)
5342 || next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
5343 return false;
5345 return true;
5349 /* The main repeat reorg function. See comment above for details. */
5351 static void
5352 mep_reorg_repeat (rtx_insn *insns)
5354 rtx_insn *insn;
5355 struct mep_doloop *loops, *loop;
5356 struct mep_doloop_begin *begin;
5357 struct mep_doloop_end *end;
5359 /* Quick exit if we haven't created any loops. */
5360 if (cfun->machine->doloop_tags == 0)
5361 return;
5363 /* Create an array of mep_doloop structures. */
5364 loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
5365 memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);
5367 /* Search the function for do-while insns and group them by loop tag. */
5368 for (insn = insns; insn; insn = NEXT_INSN (insn))
5369 if (INSN_P (insn))
5370 switch (recog_memoized (insn))
5372 case CODE_FOR_doloop_begin_internal:
5373 insn_extract (insn);
5374 loop = &loops[INTVAL (recog_data.operand[2])];
5376 begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
5377 begin->next = loop->begin;
5378 begin->insn = insn;
5379 begin->counter = recog_data.operand[0];
5381 loop->begin = begin;
5382 break;
5384 case CODE_FOR_doloop_end_internal:
5385 insn_extract (insn);
5386 loop = &loops[INTVAL (recog_data.operand[2])];
5388 end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
5389 end->insn = insn;
5390 end->fallthrough = next_real_insn (insn);
5391 end->counter = recog_data.operand[0];
5392 end->label = recog_data.operand[1];
5393 end->scratch = recog_data.operand[3];
5395 /* If this insn falls through to an unconditional jump,
5396 give it a lower priority than the others. */
5397 if (loop->end != 0 && simplejump_p (end->fallthrough))
5399 end->next = loop->end->next;
5400 loop->end->next = end;
5402 else
5404 end->next = loop->end;
5405 loop->end = end;
5407 break;
5410 /* Convert the insns for each loop in turn. */
5411 for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
5412 if (mep_repeat_loop_p (loop))
5414 /* Case (1) or (2). */
5415 rtx_code_label *repeat_label;
5416 rtx label_ref;
5418 /* Create a new label for the repeat insn. */
5419 repeat_label = gen_label_rtx ();
5421 /* Replace the doloop_begin with a repeat. */
5422 label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
5423 emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
5424 loop->begin->insn);
5425 delete_insn (loop->begin->insn);
5427 /* Insert the repeat label before the first doloop_end.
5428 Fill the gap with nops if there are other doloop_ends. */
5429 mep_insert_repeat_label_last (loop->end->insn, repeat_label,
5430 false, loop->end->next != 0);
5432 /* Emit a repeat_end (to improve the readability of the output). */
5433 emit_insn_before (gen_repeat_end (), loop->end->insn);
5435 /* Delete the first doloop_end. */
5436 delete_insn (loop->end->insn);
5438 /* Replace the others with branches to REPEAT_LABEL. */
5439 for (end = loop->end->next; end != 0; end = end->next)
5441 emit_jump_insn_before (gen_jump (repeat_label), end->insn);
5442 delete_insn (end->insn);
5443 delete_insn (end->fallthrough);
5446 else
5448 /* Case (3). First replace all the doloop_begins with increment
5449 instructions. */
5450 for (begin = loop->begin; begin != 0; begin = begin->next)
5452 emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
5453 begin->counter, const1_rtx),
5454 begin->insn);
5455 delete_insn (begin->insn);
5458 /* Replace all the doloop_ends with decrement-and-branch sequences. */
5459 for (end = loop->end; end != 0; end = end->next)
5461 rtx reg;
5463 start_sequence ();
5465 /* Load the counter value into a general register. */
5466 reg = end->counter;
5467 if (!REG_P (reg) || REGNO (reg) > 15)
5469 reg = end->scratch;
5470 emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
5473 /* Decrement the counter. */
5474 emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
5475 constm1_rtx));
5477 /* Copy it back to its original location. */
5478 if (reg != end->counter)
5479 emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));
5481 /* Jump back to the start label. */
5482 insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
5483 end->label));
5484 JUMP_LABEL (insn) = end->label;
5485 LABEL_NUSES (end->label)++;
5487 /* Emit the whole sequence before the doloop_end. */
5488 insn = get_insns ();
5489 end_sequence ();
5490 emit_insn_before (insn, end->insn);
5492 /* Delete the doloop_end. */
5493 delete_insn (end->insn);
5499 static bool
5500 mep_invertable_branch_p (rtx_insn *insn)
5502 rtx cond, set;
5503 enum rtx_code old_code;
5504 int i;
5506 set = PATTERN (insn);
5507 if (GET_CODE (set) != SET)
5508 return false;
5509 if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
5510 return false;
5511 cond = XEXP (XEXP (set, 1), 0);
5512 old_code = GET_CODE (cond);
5513 switch (old_code)
5515 case EQ:
5516 PUT_CODE (cond, NE);
5517 break;
5518 case NE:
5519 PUT_CODE (cond, EQ);
5520 break;
5521 case LT:
5522 PUT_CODE (cond, GE);
5523 break;
5524 case GE:
5525 PUT_CODE (cond, LT);
5526 break;
5527 default:
5528 return false;
5530 INSN_CODE (insn) = -1;
5531 i = recog_memoized (insn);
5532 PUT_CODE (cond, old_code);
5533 INSN_CODE (insn) = -1;
5534 return i >= 0;
5537 static void
5538 mep_invert_branch (rtx_insn *insn, rtx_insn *after)
5540 rtx cond, set, label;
5541 int i;
5543 set = PATTERN (insn);
5545 gcc_assert (GET_CODE (set) == SET);
5546 gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);
5548 cond = XEXP (XEXP (set, 1), 0);
5549 switch (GET_CODE (cond))
5551 case EQ:
5552 PUT_CODE (cond, NE);
5553 break;
5554 case NE:
5555 PUT_CODE (cond, EQ);
5556 break;
5557 case LT:
5558 PUT_CODE (cond, GE);
5559 break;
5560 case GE:
5561 PUT_CODE (cond, LT);
5562 break;
5563 default:
5564 gcc_unreachable ();
5566 label = gen_label_rtx ();
5567 emit_label_after (label, after);
5568 for (i=1; i<=2; i++)
5569 if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
5571 rtx ref = XEXP (XEXP (set, 1), i);
5572 if (LABEL_NUSES (XEXP (ref, 0)) == 1)
5573 delete_insn (XEXP (ref, 0));
5574 XEXP (ref, 0) = label;
5575 LABEL_NUSES (label) ++;
5576 JUMP_LABEL (insn) = label;
5578 INSN_CODE (insn) = -1;
5579 i = recog_memoized (insn);
5580 gcc_assert (i >= 0);
5583 static void
5584 mep_reorg_erepeat (rtx_insn *insns)
5586 rtx_insn *insn, *prev;
5587 rtx_code_label *l;
5588 rtx x;
5589 int count;
5591 for (insn = insns; insn; insn = NEXT_INSN (insn))
5592 if (JUMP_P (insn)
5593 && mep_invertable_branch_p (insn))
5595 if (dump_file)
5597 fprintf (dump_file, "\n------------------------------\n");
5598 fprintf (dump_file, "erepeat: considering this jump:\n");
5599 print_rtl_single (dump_file, insn);
5601 count = simplejump_p (insn) ? 0 : 1;
5602 for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
5604 if (CALL_P (prev) || BARRIER_P (prev))
5605 break;
5607 if (prev == JUMP_LABEL (insn))
5609 rtx_insn *newlast;
5610 if (dump_file)
5611 fprintf (dump_file, "found loop top, %d insns\n", count);
5613 if (LABEL_NUSES (prev) == 1)
5614 /* We're the only user, always safe */ ;
5615 else if (LABEL_NUSES (prev) == 2)
5617 /* See if there's a barrier before this label. If
5618 so, we know nobody inside the loop uses it.
5619 But we must be careful to put the erepeat
5620 *after* the label. */
5621 rtx_insn *barrier;
5622 for (barrier = PREV_INSN (prev);
5623 barrier && NOTE_P (barrier);
5624 barrier = PREV_INSN (barrier))
5626 if (barrier && ! BARRIER_P (barrier))
5627 break;
5629 else
5631 /* We don't know who else, within or without our loop, uses this */
5632 if (dump_file)
5633 fprintf (dump_file, "... but there are multiple users, too risky.\n");
5634 break;
5637 /* Generate a label to be used by the erepat insn. */
5638 l = gen_label_rtx ();
5640 /* Insert the erepeat after INSN's target label. */
5641 x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
5642 LABEL_NUSES (l)++;
5643 emit_insn_after (x, prev);
5645 /* Insert the erepeat label. */
5646 newlast = (mep_insert_repeat_label_last
5647 (insn, l, !simplejump_p (insn), false));
5648 if (simplejump_p (insn))
5650 emit_insn_before (gen_erepeat_end (), insn);
5651 delete_insn (insn);
5653 else
5655 mep_invert_branch (insn, newlast);
5656 emit_insn_after (gen_erepeat_end (), newlast);
5658 break;
5661 if (LABEL_P (prev))
5663 /* A label is OK if there is exactly one user, and we
5664 can find that user before the next label. */
5665 rtx_insn *user = 0;
5666 int safe = 0;
5667 if (LABEL_NUSES (prev) == 1)
5669 for (user = PREV_INSN (prev);
5670 user && (INSN_P (user) || NOTE_P (user));
5671 user = PREV_INSN (user))
5672 if (JUMP_P (user) && JUMP_LABEL (user) == prev)
5674 safe = INSN_UID (user);
5675 break;
5678 if (!safe)
5679 break;
5680 if (dump_file)
5681 fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
5682 safe, INSN_UID (prev));
5685 if (INSN_P (prev))
5687 count ++;
5691 if (dump_file)
5692 fprintf (dump_file, "\n==============================\n");
5695 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5696 always do this on its own. */
5698 static void
5699 mep_jmp_return_reorg (rtx_insn *insns)
5701 rtx_insn *insn, *label, *ret;
5702 int ret_code;
5704 for (insn = insns; insn; insn = NEXT_INSN (insn))
5705 if (simplejump_p (insn))
5707 /* Find the fist real insn the jump jumps to. */
5708 label = ret = safe_as_a <rtx_insn *> (JUMP_LABEL (insn));
5709 while (ret
5710 && (NOTE_P (ret)
5711 || LABEL_P (ret)
5712 || GET_CODE (PATTERN (ret)) == USE))
5713 ret = NEXT_INSN (ret);
5715 if (ret)
5717 /* Is it a return? */
5718 ret_code = recog_memoized (ret);
5719 if (ret_code == CODE_FOR_return_internal
5720 || ret_code == CODE_FOR_eh_return_internal)
5722 /* It is. Replace the jump with a return. */
5723 LABEL_NUSES (label) --;
5724 if (LABEL_NUSES (label) == 0)
5725 delete_insn (label);
5726 PATTERN (insn) = copy_rtx (PATTERN (ret));
5727 INSN_CODE (insn) = -1;
5734 static void
5735 mep_reorg_addcombine (rtx_insn *insns)
5737 rtx_insn *i, *n;
5739 for (i = insns; i; i = NEXT_INSN (i))
5740 if (INSN_P (i)
5741 && INSN_CODE (i) == CODE_FOR_addsi3
5742 && GET_CODE (SET_DEST (PATTERN (i))) == REG
5743 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
5744 && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
5745 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
5747 n = NEXT_INSN (i);
5748 if (INSN_P (n)
5749 && INSN_CODE (n) == CODE_FOR_addsi3
5750 && GET_CODE (SET_DEST (PATTERN (n))) == REG
5751 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
5752 && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
5753 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
5755 int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
5756 int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
5757 if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
5758 && ic + nc < 32767
5759 && ic + nc > -32768)
5761 XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
5762 SET_NEXT_INSN (i) = NEXT_INSN (n);
5763 if (NEXT_INSN (i))
5764 SET_PREV_INSN (NEXT_INSN (i)) = i;
5770 /* If this insn adjusts the stack, return the adjustment, else return
5771 zero. */
5772 static int
5773 add_sp_insn_p (rtx_insn *insn)
5775 rtx pat;
5777 if (! single_set (insn))
5778 return 0;
5779 pat = PATTERN (insn);
5780 if (GET_CODE (SET_DEST (pat)) != REG)
5781 return 0;
5782 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5783 return 0;
5784 if (GET_CODE (SET_SRC (pat)) != PLUS)
5785 return 0;
5786 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5787 return 0;
5788 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5789 return 0;
5790 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5791 return 0;
5792 return INTVAL (XEXP (SET_SRC (pat), 1));
5795 /* Check for trivial functions that set up an unneeded stack
5796 frame. */
5797 static void
5798 mep_reorg_noframe (rtx_insn *insns)
5800 rtx_insn *start_frame_insn;
5801 rtx_insn *end_frame_insn = 0;
5802 int sp_adjust, sp2;
5803 rtx sp;
5805 /* The first insn should be $sp = $sp + N */
5806 while (insns && ! INSN_P (insns))
5807 insns = NEXT_INSN (insns);
5808 if (!insns)
5809 return;
5811 sp_adjust = add_sp_insn_p (insns);
5812 if (sp_adjust == 0)
5813 return;
5815 start_frame_insn = insns;
5816 sp = SET_DEST (PATTERN (start_frame_insn));
5818 insns = next_real_insn (insns);
5820 while (insns)
5822 rtx_insn *next = next_real_insn (insns);
5823 if (!next)
5824 break;
5826 sp2 = add_sp_insn_p (insns);
5827 if (sp2)
5829 if (end_frame_insn)
5830 return;
5831 end_frame_insn = insns;
5832 if (sp2 != -sp_adjust)
5833 return;
5835 else if (mep_mentioned_p (insns, sp, 0))
5836 return;
5837 else if (CALL_P (insns))
5838 return;
5840 insns = next;
5843 if (end_frame_insn)
5845 delete_insn (start_frame_insn);
5846 delete_insn (end_frame_insn);
5850 static void
5851 mep_reorg (void)
5853 rtx_insn *insns = get_insns ();
5855 /* We require accurate REG_DEAD notes. */
5856 compute_bb_for_insn ();
5857 df_note_add_problem ();
5858 df_analyze ();
5860 mep_reorg_addcombine (insns);
5861 #if EXPERIMENTAL_REGMOVE_REORG
5862 /* VLIW packing has been done already, so we can't just delete things. */
5863 if (!mep_vliw_function_p (cfun->decl))
5864 mep_reorg_regmove (insns);
5865 #endif
5866 mep_jmp_return_reorg (insns);
5867 mep_bundle_insns (insns);
5868 mep_reorg_repeat (insns);
5869 if (optimize
5870 && !profile_flag
5871 && !profile_arc_flag
5872 && TARGET_OPT_REPEAT
5873 && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
5874 mep_reorg_erepeat (insns);
5876 /* This may delete *insns so make sure it's last. */
5877 mep_reorg_noframe (insns);
5879 df_finish_pass (false);
5884 /*----------------------------------------------------------------------*/
5885 /* Builtins */
5886 /*----------------------------------------------------------------------*/
5888 /* Element X gives the index into cgen_insns[] of the most general
5889 implementation of intrinsic X. Unimplemented intrinsics are
5890 mapped to -1. */
5891 int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];
5893 /* Element X gives the index of another instruction that is mapped to
5894 the same intrinsic as cgen_insns[X]. It is -1 when there is no other
5895 instruction.
5897 Things are set up so that mep_intrinsic_chain[X] < X. */
5898 static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];
5900 /* The bitmask for the current ISA. The ISA masks are declared
5901 in mep-intrin.h. */
5902 unsigned int mep_selected_isa;
5904 struct mep_config {
5905 const char *config_name;
5906 unsigned int isa;
5909 static struct mep_config mep_configs[] = {
5910 #ifdef COPROC_SELECTION_TABLE
5911 COPROC_SELECTION_TABLE,
5912 #endif
5913 { 0, 0 }
5916 /* Initialize the global intrinsics variables above. */
5918 static void
5919 mep_init_intrinsics (void)
5921 size_t i;
5923 /* Set MEP_SELECTED_ISA to the ISA flag for this configuration. */
5924 mep_selected_isa = mep_configs[0].isa;
5925 if (mep_config_string != 0)
5926 for (i = 0; mep_configs[i].config_name; i++)
5927 if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
5929 mep_selected_isa = mep_configs[i].isa;
5930 break;
5933 /* Assume all intrinsics are unavailable. */
5934 for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
5935 mep_intrinsic_insn[i] = -1;
5937 /* Build up the global intrinsic tables. */
5938 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
5939 if ((cgen_insns[i].isas & mep_selected_isa) != 0)
5941 mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
5942 mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
5944 /* See whether we can directly move values between one coprocessor
5945 register and another. */
5946 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
5947 if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
5948 mep_have_copro_copro_moves_p = true;
5950 /* See whether we can directly move values between core and
5951 coprocessor registers. */
5952 mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
5953 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));
5955 mep_have_core_copro_moves_p = 1;
5958 /* Declare all available intrinsic functions. Called once only. */
5960 static tree cp_data_bus_int_type_node;
5961 static tree opaque_vector_type_node;
5962 static tree v8qi_type_node;
5963 static tree v4hi_type_node;
5964 static tree v2si_type_node;
5965 static tree v8uqi_type_node;
5966 static tree v4uhi_type_node;
5967 static tree v2usi_type_node;
5969 static tree
5970 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
5972 switch (cr)
5974 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
5975 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
5976 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
5977 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
5978 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
5979 case cgen_regnum_operand_type_CHAR: return char_type_node;
5980 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
5981 case cgen_regnum_operand_type_SI: return intSI_type_node;
5982 case cgen_regnum_operand_type_DI: return intDI_type_node;
5983 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
5984 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
5985 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
5986 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
5987 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
5988 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
5989 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
5990 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
5991 default:
5992 return void_type_node;
5996 static void
5997 mep_init_builtins (void)
5999 size_t i;
6001 if (TARGET_64BIT_CR_REGS)
6002 cp_data_bus_int_type_node = long_long_integer_type_node;
6003 else
6004 cp_data_bus_int_type_node = long_integer_type_node;
6006 opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
6007 v8qi_type_node = build_vector_type (intQI_type_node, 8);
6008 v4hi_type_node = build_vector_type (intHI_type_node, 4);
6009 v2si_type_node = build_vector_type (intSI_type_node, 2);
6010 v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
6011 v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
6012 v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);
6014 add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node);
6016 add_builtin_type ("cp_vector", opaque_vector_type_node);
6018 add_builtin_type ("cp_v8qi", v8qi_type_node);
6019 add_builtin_type ("cp_v4hi", v4hi_type_node);
6020 add_builtin_type ("cp_v2si", v2si_type_node);
6022 add_builtin_type ("cp_v8uqi", v8uqi_type_node);
6023 add_builtin_type ("cp_v4uhi", v4uhi_type_node);
6024 add_builtin_type ("cp_v2usi", v2usi_type_node);
6026 /* Intrinsics like mep_cadd3 are implemented with two groups of
6027 instructions, one which uses UNSPECs and one which uses a specific
6028 rtl code such as PLUS. Instructions in the latter group belong
6029 to GROUP_KNOWN_CODE.
6031 In such cases, the intrinsic will have two entries in the global
6032 tables above. The unspec form is accessed using builtin functions
6033 while the specific form is accessed using the mep_* enum in
6034 mep-intrin.h.
6036 The idea is that __cop arithmetic and builtin functions have
6037 different optimization requirements. If mep_cadd3() appears in
6038 the source code, the user will surely except gcc to use cadd3
6039 rather than a work-alike such as add3. However, if the user
6040 just writes "a + b", where a or b are __cop variables, it is
6041 reasonable for gcc to choose a core instruction rather than
6042 cadd3 if it believes that is more optimal. */
6043 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
6044 if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
6045 && mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
6047 tree ret_type = void_type_node;
6048 tree bi_type;
6050 if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
6051 continue;
6053 if (cgen_insns[i].cret_p)
6054 ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);
6056 bi_type = build_function_type_list (ret_type, NULL_TREE);
6057 add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
6058 bi_type,
6059 cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
6063 /* Report the unavailablity of the given intrinsic. */
6065 #if 1
6066 static void
6067 mep_intrinsic_unavailable (int intrinsic)
6069 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6071 if (already_reported_p[intrinsic])
6072 return;
6074 if (mep_intrinsic_insn[intrinsic] < 0)
6075 error ("coprocessor intrinsic %qs is not available in this configuration",
6076 cgen_intrinsics[intrinsic]);
6077 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6078 error ("%qs is not available in VLIW functions",
6079 cgen_intrinsics[intrinsic]);
6080 else
6081 error ("%qs is not available in non-VLIW functions",
6082 cgen_intrinsics[intrinsic]);
6084 already_reported_p[intrinsic] = 1;
6086 #endif
6089 /* See if any implementation of INTRINSIC is available to the
6090 current function. If so, store the most general implementation
6091 in *INSN_PTR and return true. Return false otherwise. */
6093 static bool
6094 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6096 int i;
6098 i = mep_intrinsic_insn[intrinsic];
6099 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6100 i = mep_intrinsic_chain[i];
6102 if (i >= 0)
6104 *insn_ptr = &cgen_insns[i];
6105 return true;
6107 return false;
6111 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6112 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6113 try using a work-alike instead. In this case, the returned insn
6114 may have three operands rather than two. */
6116 static bool
6117 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6119 size_t i;
6121 if (intrinsic == mep_cmov)
6123 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6124 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6125 return true;
6126 return false;
6128 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6132 /* If ARG is a register operand that is the same size as MODE, convert it
6133 to MODE using a subreg. Otherwise return ARG as-is. */
6135 static rtx
6136 mep_convert_arg (machine_mode mode, rtx arg)
6138 if (GET_MODE (arg) != mode
6139 && register_operand (arg, VOIDmode)
6140 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6141 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6142 return arg;
6146 /* Apply regnum conversions to ARG using the description given by REGNUM.
6147 Return the new argument on success and null on failure. */
6149 static rtx
6150 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6152 if (regnum->count == 0)
6153 return arg;
6155 if (GET_CODE (arg) != CONST_INT
6156 || INTVAL (arg) < 0
6157 || INTVAL (arg) >= regnum->count)
6158 return 0;
6160 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6164 /* Try to make intrinsic argument ARG match the given operand.
6165 UNSIGNED_P is true if the argument has an unsigned type. */
6167 static rtx
6168 mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
6169 int unsigned_p)
6171 if (GET_CODE (arg) == CONST_INT)
6173 /* CONST_INTs can only be bound to integer operands. */
6174 if (GET_MODE_CLASS (operand->mode) != MODE_INT)
6175 return 0;
6177 else if (GET_CODE (arg) == CONST_DOUBLE)
6178 /* These hold vector constants. */;
6179 else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
6181 /* If the argument is a different size from what's expected, we must
6182 have a value in the right mode class in order to convert it. */
6183 if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
6184 return 0;
6186 /* If the operand is an rvalue, promote or demote it to match the
6187 operand's size. This might not need extra instructions when
6188 ARG is a register value. */
6189 if (operand->constraint[0] != '=')
6190 arg = convert_to_mode (operand->mode, arg, unsigned_p);
6193 /* If the operand is an lvalue, bind the operand to a new register.
6194 The caller will copy this value into ARG after the main
6195 instruction. By doing this always, we produce slightly more
6196 optimal code. */
6197 /* But not for control registers. */
6198 if (operand->constraint[0] == '='
6199 && (! REG_P (arg)
6200 || ! (CONTROL_REGNO_P (REGNO (arg))
6201 || CCR_REGNO_P (REGNO (arg))
6202 || CR_REGNO_P (REGNO (arg)))
6204 return gen_reg_rtx (operand->mode);
6206 /* Try simple mode punning. */
6207 arg = mep_convert_arg (operand->mode, arg);
6208 if (operand->predicate (arg, operand->mode))
6209 return arg;
6211 /* See if forcing the argument into a register will make it match. */
6212 if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
6213 arg = force_reg (operand->mode, arg);
6214 else
6215 arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
6216 if (operand->predicate (arg, operand->mode))
6217 return arg;
6219 return 0;
6223 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6224 function FNNAME. OPERAND describes the operand to which ARGNUM
6225 is mapped. */
6227 static void
6228 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6229 int argnum, tree fnname)
6231 size_t i;
6233 if (GET_CODE (arg) == CONST_INT)
6234 for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6235 if (operand->predicate == cgen_immediate_predicates[i].predicate)
6237 const struct cgen_immediate_predicate *predicate;
6238 HOST_WIDE_INT argval;
6240 predicate = &cgen_immediate_predicates[i];
6241 argval = INTVAL (arg);
6242 if (argval < predicate->lower || argval >= predicate->upper)
6243 error ("argument %d of %qE must be in the range %d...%d",
6244 argnum, fnname, predicate->lower, predicate->upper - 1);
6245 else
6246 error ("argument %d of %qE must be a multiple of %d",
6247 argnum, fnname, predicate->align);
6248 return;
6251 error ("incompatible type for argument %d of %qE", argnum, fnname);
6254 static rtx
6255 mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6256 rtx subtarget ATTRIBUTE_UNUSED,
6257 machine_mode mode ATTRIBUTE_UNUSED,
6258 int ignore ATTRIBUTE_UNUSED)
6260 rtx pat, op[10], arg[10];
6261 unsigned int a;
6262 int opindex, unsigned_p[10];
6263 tree fndecl, args;
6264 unsigned int n_args;
6265 tree fnname;
6266 const struct cgen_insn *cgen_insn;
6267 const struct insn_data_d *idata;
6268 unsigned int first_arg = 0;
6269 unsigned int builtin_n_args;
6271 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6272 fnname = DECL_NAME (fndecl);
6274 /* Find out which instruction we should emit. Note that some coprocessor
6275 intrinsics may only be available in VLIW mode, or only in normal mode. */
6276 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6278 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
6279 return NULL_RTX;
6281 idata = &insn_data[cgen_insn->icode];
6283 builtin_n_args = cgen_insn->num_args;
6285 if (cgen_insn->cret_p)
6287 if (cgen_insn->cret_p > 1)
6288 builtin_n_args ++;
6289 first_arg = 1;
6290 mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
6291 builtin_n_args --;
6294 /* Evaluate each argument. */
6295 n_args = call_expr_nargs (exp);
6297 if (n_args < builtin_n_args)
6299 error ("too few arguments to %qE", fnname);
6300 return NULL_RTX;
6302 if (n_args > builtin_n_args)
6304 error ("too many arguments to %qE", fnname);
6305 return NULL_RTX;
6308 for (a = first_arg; a < builtin_n_args + first_arg; a++)
6310 tree value;
6312 args = CALL_EXPR_ARG (exp, a - first_arg);
6314 value = args;
6316 #if 0
6317 if (cgen_insn->regnums[a].reference_p)
6319 if (TREE_CODE (value) != ADDR_EXPR)
6321 debug_tree(value);
6322 error ("argument %d of %qE must be an address", a+1, fnname);
6323 return NULL_RTX;
6325 value = TREE_OPERAND (value, 0);
6327 #endif
6329 /* If the argument has been promoted to int, get the unpromoted
6330 value. This is necessary when sub-int memory values are bound
6331 to reference parameters. */
6332 if (TREE_CODE (value) == NOP_EXPR
6333 && TREE_TYPE (value) == integer_type_node
6334 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6335 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6336 < TYPE_PRECISION (TREE_TYPE (value))))
6337 value = TREE_OPERAND (value, 0);
6339 /* If the argument has been promoted to double, get the unpromoted
6340 SFmode value. This is necessary for FMAX support, for example. */
6341 if (TREE_CODE (value) == NOP_EXPR
6342 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6343 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6344 && TYPE_MODE (TREE_TYPE (value)) == DFmode
6345 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6346 value = TREE_OPERAND (value, 0);
6348 unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6349 arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
6350 arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6351 if (cgen_insn->regnums[a].reference_p)
6353 tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6354 machine_mode pointed_mode = TYPE_MODE (pointed_to);
6356 arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
6358 if (arg[a] == 0)
6360 error ("argument %d of %qE must be in the range %d...%d",
6361 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
6362 return NULL_RTX;
6366 for (a = 0; a < first_arg; a++)
6368 if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6369 arg[a] = target;
6370 else
6371 arg[a] = gen_reg_rtx (idata->operand[0].mode);
6374 /* Convert the arguments into a form suitable for the intrinsic.
6375 Report an error if this isn't possible. */
6376 for (opindex = 0; opindex < idata->n_operands; opindex++)
6378 a = cgen_insn->op_mapping[opindex];
6379 op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6380 arg[a], unsigned_p[a]);
6381 if (op[opindex] == 0)
6383 mep_incompatible_arg (&idata->operand[opindex],
6384 arg[a], a + 1 - first_arg, fnname);
6385 return NULL_RTX;
6389 /* Emit the instruction. */
6390 pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6391 op[5], op[6], op[7], op[8], op[9]);
6393 if (GET_CODE (pat) == SET
6394 && GET_CODE (SET_DEST (pat)) == PC
6395 && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6396 emit_jump_insn (pat);
6397 else
6398 emit_insn (pat);
6400 /* Copy lvalues back to their final locations. */
6401 for (opindex = 0; opindex < idata->n_operands; opindex++)
6402 if (idata->operand[opindex].constraint[0] == '=')
6404 a = cgen_insn->op_mapping[opindex];
6405 if (a >= first_arg)
6407 if (GET_MODE_CLASS (GET_MODE (arg[a]))
6408 != GET_MODE_CLASS (GET_MODE (op[opindex])))
6409 emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6410 op[opindex]));
6411 else
6413 /* First convert the operand to the right mode, then copy it
6414 into the destination. Doing the conversion as a separate
6415 step (rather than using convert_move) means that we can
6416 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6417 refer to the same register. */
6418 op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6419 op[opindex], unsigned_p[a]);
6420 if (!rtx_equal_p (arg[a], op[opindex]))
6421 emit_move_insn (arg[a], op[opindex]);
6426 if (first_arg > 0 && target && target != op[0])
6428 emit_move_insn (target, op[0]);
6431 return target;
6434 static bool
6435 mep_vector_mode_supported_p (machine_mode mode ATTRIBUTE_UNUSED)
6437 return false;
6440 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6441 a global register. */
6443 static bool
6444 global_reg_mentioned_p_1 (const_rtx x)
6446 int regno;
6448 switch (GET_CODE (x))
6450 case SUBREG:
6451 if (REG_P (SUBREG_REG (x)))
6453 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6454 && global_regs[subreg_regno (x)])
6455 return true;
6456 return false;
6458 break;
6460 case REG:
6461 regno = REGNO (x);
6462 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6463 return true;
6464 return false;
6466 case CALL:
6467 /* A non-constant call might use a global register. */
6468 return true;
6470 default:
6471 break;
6474 return false;
6477 /* Returns nonzero if X mentions a global register. */
6479 static bool
6480 global_reg_mentioned_p (rtx x)
6482 if (INSN_P (x))
6484 if (CALL_P (x))
6486 if (! RTL_CONST_OR_PURE_CALL_P (x))
6487 return true;
6488 x = CALL_INSN_FUNCTION_USAGE (x);
6489 if (x == 0)
6490 return false;
6492 else
6493 x = PATTERN (x);
6496 subrtx_iterator::array_type array;
6497 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
6498 if (global_reg_mentioned_p_1 (*iter))
6499 return true;
6500 return false;
6502 /* Scheduling hooks for VLIW mode.
6504 Conceptually this is very simple: we have a two-pack architecture
6505 that takes one core insn and one coprocessor insn to make up either
6506 a 32- or 64-bit instruction word (depending on the option bit set in
6507 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6508 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6509 and one 48-bit cop insn or two 32-bit core/cop insns.
6511 In practice, instruction selection will be a bear. Consider in
6512 VL64 mode the following insns
6514 add $1, 1
6515 cmov $cr0, $0
6517 these cannot pack, since the add is a 16-bit core insn and cmov
6518 is a 32-bit cop insn. However,
6520 add3 $1, $1, 1
6521 cmov $cr0, $0
6523 packs just fine. For good VLIW code generation in VL64 mode, we
6524 will have to have 32-bit alternatives for many of the common core
6525 insns. Not implemented. */
6527 static int
6528 mep_adjust_cost (rtx_insn *insn, rtx link, rtx_insn *dep_insn, int cost)
6530 int cost_specified;
6532 if (REG_NOTE_KIND (link) != 0)
6534 /* See whether INSN and DEP_INSN are intrinsics that set the same
6535 hard register. If so, it is more important to free up DEP_INSN
6536 than it is to free up INSN.
6538 Note that intrinsics like mep_mulr are handled differently from
6539 the equivalent mep.md patterns. In mep.md, if we don't care
6540 about the value of $lo and $hi, the pattern will just clobber
6541 the registers, not set them. Since clobbers don't count as
6542 output dependencies, it is often possible to reorder two mulrs,
6543 even after reload.
6545 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6546 so any pair of mep_mulr()s will be inter-dependent. We should
6547 therefore give the first mep_mulr() a higher priority. */
6548 if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
6549 && global_reg_mentioned_p (PATTERN (insn))
6550 && global_reg_mentioned_p (PATTERN (dep_insn)))
6551 return 1;
6553 /* If the dependence is an anti or output dependence, assume it
6554 has no cost. */
6555 return 0;
6558 /* If we can't recognize the insns, we can't really do anything. */
6559 if (recog_memoized (dep_insn) < 0)
6560 return cost;
6562 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6563 attribute instead. */
6564 if (!TARGET_H1)
6566 cost_specified = get_attr_latency (dep_insn);
6567 if (cost_specified != 0)
6568 return cost_specified;
6571 return cost;
6574 /* ??? We don't properly compute the length of a load/store insn,
6575 taking into account the addressing mode. */
6577 static int
6578 mep_issue_rate (void)
6580 return TARGET_IVC2 ? 3 : 2;
6583 /* Return true if function DECL was declared with the vliw attribute. */
6585 bool
6586 mep_vliw_function_p (tree decl)
6588 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6591 static rtx_insn *
6592 mep_find_ready_insn (rtx_insn **ready, int nready, enum attr_slot slot,
6593 int length)
6595 int i;
6597 for (i = nready - 1; i >= 0; --i)
6599 rtx_insn *insn = ready[i];
6600 if (recog_memoized (insn) >= 0
6601 && get_attr_slot (insn) == slot
6602 && get_attr_length (insn) == length)
6603 return insn;
6606 return NULL;
6609 static void
6610 mep_move_ready_insn (rtx_insn **ready, int nready, rtx_insn *insn)
6612 int i;
6614 for (i = 0; i < nready; ++i)
6615 if (ready[i] == insn)
6617 for (; i < nready - 1; ++i)
6618 ready[i] = ready[i + 1];
6619 ready[i] = insn;
6620 return;
6623 gcc_unreachable ();
6626 static void
6627 mep_print_sched_insn (FILE *dump, rtx_insn *insn)
6629 const char *slots = "none";
6630 const char *name = NULL;
6631 int code;
6632 char buf[30];
6634 if (GET_CODE (PATTERN (insn)) == SET
6635 || GET_CODE (PATTERN (insn)) == PARALLEL)
6637 switch (get_attr_slots (insn))
6639 case SLOTS_CORE: slots = "core"; break;
6640 case SLOTS_C3: slots = "c3"; break;
6641 case SLOTS_P0: slots = "p0"; break;
6642 case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6643 case SLOTS_P0_P1: slots = "p0,p1"; break;
6644 case SLOTS_P0S: slots = "p0s"; break;
6645 case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6646 case SLOTS_P1: slots = "p1"; break;
6647 default:
6648 sprintf(buf, "%d", get_attr_slots (insn));
6649 slots = buf;
6650 break;
6653 if (GET_CODE (PATTERN (insn)) == USE)
6654 slots = "use";
6656 code = INSN_CODE (insn);
6657 if (code >= 0)
6658 name = get_insn_name (code);
6659 if (!name)
6660 name = "{unknown}";
6662 fprintf (dump,
6663 "insn %4d %4d %8s %s\n",
6664 code,
6665 INSN_UID (insn),
6666 name,
6667 slots);
6670 static int
6671 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6672 int sched_verbose ATTRIBUTE_UNUSED, rtx_insn **ready,
6673 int *pnready, int clock ATTRIBUTE_UNUSED)
6675 int nready = *pnready;
6676 rtx_insn *core_insn, *cop_insn;
6677 int i;
6679 if (dump && sched_verbose > 1)
6681 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6682 for (i=0; i<nready; i++)
6683 mep_print_sched_insn (dump, ready[i]);
6684 fprintf (dump, "\n");
6687 if (!mep_vliw_function_p (cfun->decl))
6688 return 1;
6689 if (nready < 2)
6690 return 1;
6692 /* IVC2 uses a DFA to determine what's ready and what's not. */
6693 if (TARGET_IVC2)
6694 return nready;
6696 /* We can issue either a core or coprocessor instruction.
6697 Look for a matched pair of insns to reorder. If we don't
6698 find any, don't second-guess the scheduler's priorities. */
6700 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6701 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6702 TARGET_OPT_VL64 ? 6 : 2)))
6704 else if (TARGET_OPT_VL64
6705 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6706 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6708 else
6709 /* We didn't find a pair. Issue the single insn at the head
6710 of the ready list. */
6711 return 1;
6713 /* Reorder the two insns first. */
6714 mep_move_ready_insn (ready, nready, core_insn);
6715 mep_move_ready_insn (ready, nready - 1, cop_insn);
6716 return 2;
6719 /* Return true if X contains a register that is set by insn PREV. */
6721 static bool
6722 mep_store_find_set (const_rtx x, const rtx_insn *prev)
6724 subrtx_iterator::array_type array;
6725 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
6726 if (REG_P (x) && reg_set_p (x, prev))
6727 return true;
6728 return false;
6731 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6732 not the containing insn. */
6734 static bool
6735 mep_store_data_bypass_1 (rtx_insn *prev, rtx pat)
6737 /* Cope with intrinsics like swcpa. */
6738 if (GET_CODE (pat) == PARALLEL)
6740 int i;
6742 for (i = 0; i < XVECLEN (pat, 0); i++)
6743 if (mep_store_data_bypass_p (prev,
6744 as_a <rtx_insn *> (XVECEXP (pat, 0, i))))
6745 return true;
6747 return false;
6750 /* Check for some sort of store. */
6751 if (GET_CODE (pat) != SET
6752 || GET_CODE (SET_DEST (pat)) != MEM)
6753 return false;
6755 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6756 The first operand to the unspec is the store data and the other operands
6757 are used to calculate the address. */
6758 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6760 rtx src;
6761 int i;
6763 src = SET_SRC (pat);
6764 for (i = 1; i < XVECLEN (src, 0); i++)
6765 if (mep_store_find_set (XVECEXP (src, 0, i), prev))
6766 return false;
6768 return true;
6771 /* Otherwise just check that PREV doesn't modify any register mentioned
6772 in the memory destination. */
6773 return !mep_store_find_set (SET_DEST (pat), prev);
6776 /* Return true if INSN is a store instruction and if the store address
6777 has no true dependence on PREV. */
6779 bool
6780 mep_store_data_bypass_p (rtx_insn *prev, rtx_insn *insn)
6782 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6785 /* Return true if, apart from HI/LO, there are no true dependencies
6786 between multiplication instructions PREV and INSN. */
6788 bool
6789 mep_mul_hilo_bypass_p (rtx_insn *prev, rtx_insn *insn)
6791 rtx pat;
6793 pat = PATTERN (insn);
6794 if (GET_CODE (pat) == PARALLEL)
6795 pat = XVECEXP (pat, 0, 0);
6796 if (GET_CODE (pat) != SET)
6797 return false;
6798 subrtx_iterator::array_type array;
6799 FOR_EACH_SUBRTX (iter, array, SET_SRC (pat), NONCONST)
6801 const_rtx x = *iter;
6802 if (REG_P (x)
6803 && REGNO (x) != LO_REGNO
6804 && REGNO (x) != HI_REGNO
6805 && reg_set_p (x, prev))
6806 return false;
6808 return true;
6811 /* Return true if INSN is an ldc instruction that issues to the
6812 MeP-h1 integer pipeline. This is true for instructions that
6813 read from PSW, LP, SAR, HI and LO. */
6815 bool
6816 mep_ipipe_ldc_p (rtx_insn *insn)
6818 rtx pat, src;
6820 pat = PATTERN (insn);
6822 /* Cope with instrinsics that set both a hard register and its shadow.
6823 The set of the hard register comes first. */
6824 if (GET_CODE (pat) == PARALLEL)
6825 pat = XVECEXP (pat, 0, 0);
6827 if (GET_CODE (pat) == SET)
6829 src = SET_SRC (pat);
6831 /* Cope with intrinsics. The first operand to the unspec is
6832 the source register. */
6833 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6834 src = XVECEXP (src, 0, 0);
6836 if (REG_P (src))
6837 switch (REGNO (src))
6839 case PSW_REGNO:
6840 case LP_REGNO:
6841 case SAR_REGNO:
6842 case HI_REGNO:
6843 case LO_REGNO:
6844 return true;
6847 return false;
6850 /* Create a VLIW bundle from core instruction CORE and coprocessor
6851 instruction COP. COP always satisfies INSN_P, but CORE can be
6852 either a new pattern or an existing instruction.
6854 Emit the bundle in place of COP and return it. */
6856 static rtx_insn *
6857 mep_make_bundle (rtx core_insn_or_pat, rtx_insn *cop)
6859 rtx seq;
6860 rtx_insn *core_insn;
6861 rtx_insn *insn;
6863 /* If CORE is an existing instruction, remove it, otherwise put
6864 the new pattern in an INSN harness. */
6865 if (INSN_P (core_insn_or_pat))
6867 core_insn = as_a <rtx_insn *> (core_insn_or_pat);
6868 remove_insn (core_insn);
6870 else
6871 core_insn = make_insn_raw (core_insn_or_pat);
6873 /* Generate the bundle sequence and replace COP with it. */
6874 seq = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core_insn, cop));
6875 insn = emit_insn_after (seq, cop);
6876 remove_insn (cop);
6878 /* Set up the links of the insns inside the SEQUENCE. */
6879 SET_PREV_INSN (core_insn) = PREV_INSN (insn);
6880 SET_NEXT_INSN (core_insn) = cop;
6881 SET_PREV_INSN (cop) = core_insn;
6882 SET_NEXT_INSN (cop) = NEXT_INSN (insn);
6884 /* Set the VLIW flag for the coprocessor instruction. */
6885 PUT_MODE (core_insn, VOIDmode);
6886 PUT_MODE (cop, BImode);
6888 /* Derive a location for the bundle. Individual instructions cannot
6889 have their own location because there can be no assembler labels
6890 between CORE_INSN and COP. */
6891 INSN_LOCATION (insn) = INSN_LOCATION (INSN_LOCATION (core_insn) ? core_insn : cop);
6892 INSN_LOCATION (core_insn) = 0;
6893 INSN_LOCATION (cop) = 0;
6895 return insn;
6898 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6900 static void
6901 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6903 rtx * pinsn = (rtx *) data;
6905 if (*pinsn && reg_mentioned_p (x, *pinsn))
6906 *pinsn = NULL_RTX;
6909 /* Return true if anything in insn X is (anti,output,true) dependent on
6910 anything in insn Y. */
6912 static int
6913 mep_insn_dependent_p (rtx x, rtx y)
6915 rtx tmp;
6917 gcc_assert (INSN_P (x));
6918 gcc_assert (INSN_P (y));
6920 tmp = PATTERN (y);
6921 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6922 if (tmp == NULL_RTX)
6923 return 1;
6925 tmp = PATTERN (x);
6926 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6927 if (tmp == NULL_RTX)
6928 return 1;
6930 return 0;
6933 static int
6934 core_insn_p (rtx_insn *insn)
6936 if (GET_CODE (PATTERN (insn)) == USE)
6937 return 0;
6938 if (get_attr_slot (insn) == SLOT_CORE)
6939 return 1;
6940 return 0;
6943 /* Mark coprocessor instructions that can be bundled together with
6944 the immediately preceding core instruction. This is later used
6945 to emit the "+" that tells the assembler to create a VLIW insn.
6947 For unbundled insns, the assembler will automatically add coprocessor
6948 nops, and 16-bit core nops. Due to an apparent oversight in the
6949 spec, the assembler will _not_ automatically add 32-bit core nops,
6950 so we have to emit those here.
6952 Called from mep_insn_reorg. */
6954 static void
6955 mep_bundle_insns (rtx_insn *insns)
6957 rtx_insn *insn, *last = NULL, *first = NULL;
6958 int saw_scheduling = 0;
6960 /* Only do bundling if we're in vliw mode. */
6961 if (!mep_vliw_function_p (cfun->decl))
6962 return;
6964 /* The first insn in a bundle are TImode, the remainder are
6965 VOIDmode. After this function, the first has VOIDmode and the
6966 rest have BImode. */
6968 /* Note: this doesn't appear to be true for JUMP_INSNs. */
6970 /* First, move any NOTEs that are within a bundle, to the beginning
6971 of the bundle. */
6972 for (insn = insns; insn ; insn = NEXT_INSN (insn))
6974 if (NOTE_P (insn) && first)
6975 /* Don't clear FIRST. */;
6977 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
6978 first = insn;
6980 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
6982 rtx_insn *note, *prev;
6984 /* INSN is part of a bundle; FIRST is the first insn in that
6985 bundle. Move all intervening notes out of the bundle.
6986 In addition, since the debug pass may insert a label
6987 whenever the current line changes, set the location info
6988 for INSN to match FIRST. */
6990 INSN_LOCATION (insn) = INSN_LOCATION (first);
6992 note = PREV_INSN (insn);
6993 while (note && note != first)
6995 prev = PREV_INSN (note);
6997 if (NOTE_P (note))
6999 /* Remove NOTE from here... */
7000 SET_PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
7001 SET_NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
7002 /* ...and put it in here. */
7003 SET_NEXT_INSN (note) = first;
7004 SET_PREV_INSN (note) = PREV_INSN (first);
7005 SET_NEXT_INSN (PREV_INSN (note)) = note;
7006 SET_PREV_INSN (NEXT_INSN (note)) = note;
7009 note = prev;
7013 else if (!NONJUMP_INSN_P (insn))
7014 first = 0;
7017 /* Now fix up the bundles. */
7018 for (insn = insns; insn ; insn = NEXT_INSN (insn))
7020 if (NOTE_P (insn))
7021 continue;
7023 if (!NONJUMP_INSN_P (insn))
7025 last = 0;
7026 continue;
7029 /* If we're not optimizing enough, there won't be scheduling
7030 info. We detect that here. */
7031 if (GET_MODE (insn) == TImode)
7032 saw_scheduling = 1;
7033 if (!saw_scheduling)
7034 continue;
7036 if (TARGET_IVC2)
7038 rtx_insn *core_insn = NULL;
7040 /* IVC2 slots are scheduled by DFA, so we just accept
7041 whatever the scheduler gives us. However, we must make
7042 sure the core insn (if any) is the first in the bundle.
7043 The IVC2 assembler can insert whatever NOPs are needed,
7044 and allows a COP insn to be first. */
7046 if (NONJUMP_INSN_P (insn)
7047 && GET_CODE (PATTERN (insn)) != USE
7048 && GET_MODE (insn) == TImode)
7050 for (last = insn;
7051 NEXT_INSN (last)
7052 && GET_MODE (NEXT_INSN (last)) == VOIDmode
7053 && NONJUMP_INSN_P (NEXT_INSN (last));
7054 last = NEXT_INSN (last))
7056 if (core_insn_p (last))
7057 core_insn = last;
7059 if (core_insn_p (last))
7060 core_insn = last;
7062 if (core_insn && core_insn != insn)
7064 /* Swap core insn to first in the bundle. */
7066 /* Remove core insn. */
7067 if (PREV_INSN (core_insn))
7068 SET_NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
7069 if (NEXT_INSN (core_insn))
7070 SET_PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);
7072 /* Re-insert core insn. */
7073 SET_PREV_INSN (core_insn) = PREV_INSN (insn);
7074 SET_NEXT_INSN (core_insn) = insn;
7076 if (PREV_INSN (core_insn))
7077 SET_NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
7078 SET_PREV_INSN (insn) = core_insn;
7080 PUT_MODE (core_insn, TImode);
7081 PUT_MODE (insn, VOIDmode);
7085 /* The first insn has TImode, the rest have VOIDmode */
7086 if (GET_MODE (insn) == TImode)
7087 PUT_MODE (insn, VOIDmode);
7088 else
7089 PUT_MODE (insn, BImode);
7090 continue;
7093 PUT_MODE (insn, VOIDmode);
7094 if (recog_memoized (insn) >= 0
7095 && get_attr_slot (insn) == SLOT_COP)
7097 if (JUMP_P (insn)
7098 || ! last
7099 || recog_memoized (last) < 0
7100 || get_attr_slot (last) != SLOT_CORE
7101 || (get_attr_length (insn)
7102 != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
7103 || mep_insn_dependent_p (insn, last))
7105 switch (get_attr_length (insn))
7107 case 8:
7108 break;
7109 case 6:
7110 insn = mep_make_bundle (gen_nop (), insn);
7111 break;
7112 case 4:
7113 if (TARGET_OPT_VL64)
7114 insn = mep_make_bundle (gen_nop32 (), insn);
7115 break;
7116 case 2:
7117 if (TARGET_OPT_VL64)
7118 error ("2 byte cop instructions are"
7119 " not allowed in 64-bit VLIW mode");
7120 else
7121 insn = mep_make_bundle (gen_nop (), insn);
7122 break;
7123 default:
7124 error ("unexpected %d byte cop instruction",
7125 get_attr_length (insn));
7126 break;
7129 else
7130 insn = mep_make_bundle (last, insn);
7133 last = insn;
7138 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7139 Return true on success. This function can fail if the intrinsic
7140 is unavailable or if the operands don't satisfy their predicates. */
7142 bool
7143 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7145 const struct cgen_insn *cgen_insn;
7146 const struct insn_data_d *idata;
7147 rtx newop[10];
7148 int i;
7150 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7151 return false;
7153 idata = &insn_data[cgen_insn->icode];
7154 for (i = 0; i < idata->n_operands; i++)
7156 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7157 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7158 return false;
7161 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7162 newop[3], newop[4], newop[5],
7163 newop[6], newop[7], newop[8]));
7165 return true;
7169 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7170 OPERANDS[0]. Report an error if the instruction could not
7171 be synthesized. OPERANDS[1] is a register_operand. For sign
7172 and zero extensions, it may be smaller than SImode. */
7174 bool
7175 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
7176 rtx * operands ATTRIBUTE_UNUSED)
7178 return false;
7182 /* Likewise, but apply a binary operation to OPERANDS[1] and
7183 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7184 can be a general_operand.
7186 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7187 third operand. REG and REG3 take register operands only. */
7189 bool
7190 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
7191 int ATTRIBUTE_UNUSED immediate3,
7192 int ATTRIBUTE_UNUSED reg,
7193 int ATTRIBUTE_UNUSED reg3,
7194 rtx * operands ATTRIBUTE_UNUSED)
7196 return false;
7199 static bool
7200 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
7201 int opno ATTRIBUTE_UNUSED, int *total,
7202 bool ATTRIBUTE_UNUSED speed_t)
7204 switch (code)
7206 case CONST_INT:
7207 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7208 *total = 0;
7209 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7210 *total = 1;
7211 else
7212 *total = 3;
7213 return true;
7215 case SYMBOL_REF:
7216 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7217 return true;
7219 case MULT:
7220 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7221 ? COSTS_N_INSNS (3)
7222 : COSTS_N_INSNS (2));
7223 return true;
7225 return false;
7228 static int
7229 mep_address_cost (rtx addr ATTRIBUTE_UNUSED,
7230 machine_mode mode ATTRIBUTE_UNUSED,
7231 addr_space_t as ATTRIBUTE_UNUSED,
7232 bool ATTRIBUTE_UNUSED speed_p)
7234 return 1;
7237 static void
7238 mep_asm_init_sections (void)
7240 based_section
7241 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7242 "\t.section .based,\"aw\"");
7244 tinybss_section
7245 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7246 "\t.section .sbss,\"aw\"");
7248 sdata_section
7249 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7250 "\t.section .sdata,\"aw\",@progbits");
7252 far_section
7253 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7254 "\t.section .far,\"aw\"");
7256 farbss_section
7257 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7258 "\t.section .farbss,\"aw\"");
7260 frodata_section
7261 = get_unnamed_section (0, output_section_asm_op,
7262 "\t.section .frodata,\"a\"");
7264 srodata_section
7265 = get_unnamed_section (0, output_section_asm_op,
7266 "\t.section .srodata,\"a\"");
7268 vtext_section
7269 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7270 "\t.section .vtext,\"axv\"\n\t.vliw");
7272 vftext_section
7273 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7274 "\t.section .vftext,\"axv\"\n\t.vliw");
7276 ftext_section
7277 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7278 "\t.section .ftext,\"ax\"\n\t.core");
7282 /* Initialize the GCC target structure. */
7284 #undef TARGET_ASM_FUNCTION_PROLOGUE
7285 #define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
7286 #undef TARGET_ATTRIBUTE_TABLE
7287 #define TARGET_ATTRIBUTE_TABLE mep_attribute_table
7288 #undef TARGET_COMP_TYPE_ATTRIBUTES
7289 #define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
7290 #undef TARGET_INSERT_ATTRIBUTES
7291 #define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
7292 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
7293 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
7294 #undef TARGET_CAN_INLINE_P
7295 #define TARGET_CAN_INLINE_P mep_can_inline_p
7296 #undef TARGET_SECTION_TYPE_FLAGS
7297 #define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
7298 #undef TARGET_ASM_NAMED_SECTION
7299 #define TARGET_ASM_NAMED_SECTION mep_asm_named_section
7300 #undef TARGET_INIT_BUILTINS
7301 #define TARGET_INIT_BUILTINS mep_init_builtins
7302 #undef TARGET_EXPAND_BUILTIN
7303 #define TARGET_EXPAND_BUILTIN mep_expand_builtin
7304 #undef TARGET_SCHED_ADJUST_COST
7305 #define TARGET_SCHED_ADJUST_COST mep_adjust_cost
7306 #undef TARGET_SCHED_ISSUE_RATE
7307 #define TARGET_SCHED_ISSUE_RATE mep_issue_rate
7308 #undef TARGET_SCHED_REORDER
7309 #define TARGET_SCHED_REORDER mep_sched_reorder
7310 #undef TARGET_STRIP_NAME_ENCODING
7311 #define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
7312 #undef TARGET_ASM_SELECT_SECTION
7313 #define TARGET_ASM_SELECT_SECTION mep_select_section
7314 #undef TARGET_ASM_UNIQUE_SECTION
7315 #define TARGET_ASM_UNIQUE_SECTION mep_unique_section
7316 #undef TARGET_ENCODE_SECTION_INFO
7317 #define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
7318 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
7319 #define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
7320 #undef TARGET_RTX_COSTS
7321 #define TARGET_RTX_COSTS mep_rtx_cost
7322 #undef TARGET_ADDRESS_COST
7323 #define TARGET_ADDRESS_COST mep_address_cost
7324 #undef TARGET_MACHINE_DEPENDENT_REORG
7325 #define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
7326 #undef TARGET_SETUP_INCOMING_VARARGS
7327 #define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
7328 #undef TARGET_PASS_BY_REFERENCE
7329 #define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
7330 #undef TARGET_FUNCTION_ARG
7331 #define TARGET_FUNCTION_ARG mep_function_arg
7332 #undef TARGET_FUNCTION_ARG_ADVANCE
7333 #define TARGET_FUNCTION_ARG_ADVANCE mep_function_arg_advance
7334 #undef TARGET_VECTOR_MODE_SUPPORTED_P
7335 #define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
7336 #undef TARGET_OPTION_OVERRIDE
7337 #define TARGET_OPTION_OVERRIDE mep_option_override
7338 #undef TARGET_ALLOCATE_INITIAL_VALUE
7339 #define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
7340 #undef TARGET_ASM_INIT_SECTIONS
7341 #define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
7342 #undef TARGET_RETURN_IN_MEMORY
7343 #define TARGET_RETURN_IN_MEMORY mep_return_in_memory
7344 #undef TARGET_NARROW_VOLATILE_BITFIELD
7345 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
7346 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
7347 #define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
7348 #undef TARGET_BUILD_BUILTIN_VA_LIST
7349 #define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
7350 #undef TARGET_EXPAND_BUILTIN_VA_START
7351 #define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
7352 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
7353 #define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
7354 #undef TARGET_CAN_ELIMINATE
7355 #define TARGET_CAN_ELIMINATE mep_can_eliminate
7356 #undef TARGET_CONDITIONAL_REGISTER_USAGE
7357 #define TARGET_CONDITIONAL_REGISTER_USAGE mep_conditional_register_usage
7358 #undef TARGET_TRAMPOLINE_INIT
7359 #define TARGET_TRAMPOLINE_INIT mep_trampoline_init
7360 #undef TARGET_LEGITIMATE_CONSTANT_P
7361 #define TARGET_LEGITIMATE_CONSTANT_P mep_legitimate_constant_p
7362 #undef TARGET_CAN_USE_DOLOOP_P
7363 #define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost
7365 struct gcc_target targetm = TARGET_INITIALIZER;
7367 #include "gt-mep.h"