recog_memoized works on an rtx_insn *
[official-gcc.git] / gcc / config / mep / mep.c
blobb421fe335b57cf17074da85e7100c2b99828413b
1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "varasm.h"
28 #include "calls.h"
29 #include "stringpool.h"
30 #include "stor-layout.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
35 #include "insn-flags.h"
36 #include "output.h"
37 #include "insn-attr.h"
38 #include "flags.h"
39 #include "recog.h"
40 #include "obstack.h"
41 #include "tree.h"
42 #include "expr.h"
43 #include "except.h"
44 #include "function.h"
45 #include "optabs.h"
46 #include "reload.h"
47 #include "tm_p.h"
48 #include "ggc.h"
49 #include "diagnostic-core.h"
50 #include "target.h"
51 #include "target-def.h"
52 #include "langhooks.h"
53 #include "df.h"
54 #include "hash-table.h"
55 #include "vec.h"
56 #include "basic-block.h"
57 #include "tree-ssa-alias.h"
58 #include "internal-fn.h"
59 #include "gimple-fold.h"
60 #include "tree-eh.h"
61 #include "gimple-expr.h"
62 #include "is-a.h"
63 #include "gimple.h"
64 #include "gimplify.h"
65 #include "opts.h"
66 #include "dumpfile.h"
67 #include "builtins.h"
69 /* Structure of this file:
71 + Command Line Option Support
72 + Pattern support - constraints, predicates, expanders
73 + Reload Support
74 + Costs
75 + Functions to save and restore machine-specific function data.
76 + Frame/Epilog/Prolog Related
77 + Operand Printing
78 + Function args in registers
79 + Handle pipeline hazards
80 + Handle attributes
81 + Trampolines
82 + Machine-dependent Reorg
83 + Builtins. */
85 /* Symbol encodings:
87 Symbols are encoded as @ <char> . <name> where <char> is one of these:
89 b - based
90 t - tiny
91 n - near
92 f - far
93 i - io, near
94 I - io, far
95 c - cb (control bus) */
97 struct GTY(()) machine_function
99 int mep_frame_pointer_needed;
101 /* For varargs. */
102 int arg_regs_to_save;
103 int regsave_filler;
104 int frame_filler;
105 int frame_locked;
107 /* Records __builtin_return address. */
108 rtx eh_stack_adjust;
110 int reg_save_size;
111 int reg_save_slot[FIRST_PSEUDO_REGISTER];
112 unsigned char reg_saved[FIRST_PSEUDO_REGISTER];
114 /* 2 if the current function has an interrupt attribute, 1 if not, 0
115 if unknown. This is here because resource.c uses EPILOGUE_USES
116 which needs it. */
117 int interrupt_handler;
119 /* Likewise, for disinterrupt attribute. */
120 int disable_interrupts;
122 /* Number of doloop tags used so far. */
123 int doloop_tags;
125 /* True if the last tag was allocated to a doloop_end. */
126 bool doloop_tag_from_end;
128 /* True if reload changes $TP. */
129 bool reload_changes_tp;
131 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
132 We only set this if the function is an interrupt handler. */
133 int asms_without_operands;
136 #define MEP_CONTROL_REG(x) \
137 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
139 static GTY(()) section * based_section;
140 static GTY(()) section * tinybss_section;
141 static GTY(()) section * far_section;
142 static GTY(()) section * farbss_section;
143 static GTY(()) section * frodata_section;
144 static GTY(()) section * srodata_section;
146 static GTY(()) section * vtext_section;
147 static GTY(()) section * vftext_section;
148 static GTY(()) section * ftext_section;
150 static void mep_set_leaf_registers (int);
151 static bool symbol_p (rtx);
152 static bool symbolref_p (rtx);
153 static void encode_pattern_1 (rtx);
154 static void encode_pattern (rtx);
155 static bool const_in_range (rtx, int, int);
156 static void mep_rewrite_mult (rtx_insn *, rtx);
157 static void mep_rewrite_mulsi3 (rtx_insn *, rtx, rtx, rtx);
158 static void mep_rewrite_maddsi3 (rtx_insn *, rtx, rtx, rtx, rtx);
159 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx_insn *, bool);
160 static bool move_needs_splitting (rtx, rtx, enum machine_mode);
161 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
162 static bool mep_nongeneral_reg (rtx);
163 static bool mep_general_copro_reg (rtx);
164 static bool mep_nonregister (rtx);
165 static struct machine_function* mep_init_machine_status (void);
166 static rtx mep_tp_rtx (void);
167 static rtx mep_gp_rtx (void);
168 static bool mep_interrupt_p (void);
169 static bool mep_disinterrupt_p (void);
170 static bool mep_reg_set_p (rtx, rtx);
171 static bool mep_reg_set_in_function (int);
172 static bool mep_interrupt_saved_reg (int);
173 static bool mep_call_saves_register (int);
174 static rtx_insn *F (rtx_insn *);
175 static void add_constant (int, int, int, int);
176 static rtx_insn *maybe_dead_move (rtx, rtx, bool);
177 static void mep_reload_pointer (int, const char *);
178 static void mep_start_function (FILE *, HOST_WIDE_INT);
179 static bool mep_function_ok_for_sibcall (tree, tree);
180 static int unique_bit_in (HOST_WIDE_INT);
181 static int bit_size_for_clip (HOST_WIDE_INT);
182 static int bytesize (const_tree, enum machine_mode);
183 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
184 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
185 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
186 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
187 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
188 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
189 static bool mep_function_attribute_inlinable_p (const_tree);
190 static bool mep_can_inline_p (tree, tree);
191 static bool mep_lookup_pragma_disinterrupt (const char *);
192 static int mep_multiple_address_regions (tree, bool);
193 static int mep_attrlist_to_encoding (tree, tree);
194 static void mep_insert_attributes (tree, tree *);
195 static void mep_encode_section_info (tree, rtx, int);
196 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
197 static void mep_unique_section (tree, int);
198 static unsigned int mep_section_type_flags (tree, const char *, int);
199 static void mep_asm_named_section (const char *, unsigned int, tree);
200 static bool mep_mentioned_p (rtx, rtx, int);
201 static void mep_reorg_regmove (rtx_insn *);
202 static rtx_insn *mep_insert_repeat_label_last (rtx_insn *, rtx_code_label *,
203 bool, bool);
204 static void mep_reorg_repeat (rtx_insn *);
205 static bool mep_invertable_branch_p (rtx_insn *);
206 static void mep_invert_branch (rtx_insn *, rtx_insn *);
207 static void mep_reorg_erepeat (rtx_insn *);
208 static void mep_jmp_return_reorg (rtx_insn *);
209 static void mep_reorg_addcombine (rtx_insn *);
210 static void mep_reorg (void);
211 static void mep_init_intrinsics (void);
212 static void mep_init_builtins (void);
213 static void mep_intrinsic_unavailable (int);
214 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
215 static bool mep_get_move_insn (int, const struct cgen_insn **);
216 static rtx mep_convert_arg (enum machine_mode, rtx);
217 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
218 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
219 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
220 static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
221 static int mep_adjust_cost (rtx_insn *, rtx, rtx_insn *, int);
222 static int mep_issue_rate (void);
223 static rtx_insn *mep_find_ready_insn (rtx_insn **, int, enum attr_slot, int);
224 static void mep_move_ready_insn (rtx_insn **, int, rtx_insn *);
225 static int mep_sched_reorder (FILE *, int, rtx_insn **, int *, int);
226 static rtx_insn *mep_make_bundle (rtx, rtx_insn *);
227 static void mep_bundle_insns (rtx_insn *);
228 static bool mep_rtx_cost (rtx, int, int, int, int *, bool);
229 static int mep_address_cost (rtx, enum machine_mode, addr_space_t, bool);
230 static void mep_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
231 tree, int *, int);
232 static bool mep_pass_by_reference (cumulative_args_t cum, enum machine_mode,
233 const_tree, bool);
234 static rtx mep_function_arg (cumulative_args_t, enum machine_mode,
235 const_tree, bool);
236 static void mep_function_arg_advance (cumulative_args_t, enum machine_mode,
237 const_tree, bool);
238 static bool mep_vector_mode_supported_p (enum machine_mode);
239 static rtx mep_allocate_initial_value (rtx);
240 static void mep_asm_init_sections (void);
241 static int mep_comp_type_attributes (const_tree, const_tree);
242 static bool mep_narrow_volatile_bitfield (void);
243 static rtx mep_expand_builtin_saveregs (void);
244 static tree mep_build_builtin_va_list (void);
245 static void mep_expand_va_start (tree, rtx);
246 static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
247 static bool mep_can_eliminate (const int, const int);
248 static void mep_conditional_register_usage (void);
249 static void mep_trampoline_init (rtx, tree, rtx);
251 #define WANT_GCC_DEFINITIONS
252 #include "mep-intrin.h"
253 #undef WANT_GCC_DEFINITIONS
256 /* Command Line Option Support. */
258 char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
260 /* True if we can use cmov instructions to move values back and forth
261 between core and coprocessor registers. */
262 bool mep_have_core_copro_moves_p;
264 /* True if we can use cmov instructions (or a work-alike) to move
265 values between coprocessor registers. */
266 bool mep_have_copro_copro_moves_p;
268 /* A table of all coprocessor instructions that can act like
269 a coprocessor-to-coprocessor cmov. */
270 static const int mep_cmov_insns[] = {
271 mep_cmov,
272 mep_cpmov,
273 mep_fmovs,
274 mep_caddi3,
275 mep_csubi3,
276 mep_candi3,
277 mep_cori3,
278 mep_cxori3,
279 mep_cand3,
280 mep_cor3
284 static void
285 mep_set_leaf_registers (int enable)
287 int i;
289 if (mep_leaf_registers[0] != enable)
290 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
291 mep_leaf_registers[i] = enable;
294 static void
295 mep_conditional_register_usage (void)
297 int i;
299 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
301 fixed_regs[HI_REGNO] = 1;
302 fixed_regs[LO_REGNO] = 1;
303 call_used_regs[HI_REGNO] = 1;
304 call_used_regs[LO_REGNO] = 1;
307 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
308 global_regs[i] = 1;
311 static void
312 mep_option_override (void)
314 unsigned int i;
315 int j;
316 cl_deferred_option *opt;
317 vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) mep_deferred_options;
319 if (v)
320 FOR_EACH_VEC_ELT (*v, i, opt)
322 switch (opt->opt_index)
324 case OPT_mivc2:
325 for (j = 0; j < 32; j++)
326 fixed_regs[j + 48] = 0;
327 for (j = 0; j < 32; j++)
328 call_used_regs[j + 48] = 1;
329 for (j = 6; j < 8; j++)
330 call_used_regs[j + 48] = 0;
332 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
333 RN (0, "$csar0");
334 RN (1, "$cc");
335 RN (4, "$cofr0");
336 RN (5, "$cofr1");
337 RN (6, "$cofa0");
338 RN (7, "$cofa1");
339 RN (15, "$csar1");
341 RN (16, "$acc0_0");
342 RN (17, "$acc0_1");
343 RN (18, "$acc0_2");
344 RN (19, "$acc0_3");
345 RN (20, "$acc0_4");
346 RN (21, "$acc0_5");
347 RN (22, "$acc0_6");
348 RN (23, "$acc0_7");
350 RN (24, "$acc1_0");
351 RN (25, "$acc1_1");
352 RN (26, "$acc1_2");
353 RN (27, "$acc1_3");
354 RN (28, "$acc1_4");
355 RN (29, "$acc1_5");
356 RN (30, "$acc1_6");
357 RN (31, "$acc1_7");
358 #undef RN
359 break;
361 default:
362 gcc_unreachable ();
366 if (flag_pic == 1)
367 warning (OPT_fpic, "-fpic is not supported");
368 if (flag_pic == 2)
369 warning (OPT_fPIC, "-fPIC is not supported");
370 if (TARGET_S && TARGET_M)
371 error ("only one of -ms and -mm may be given");
372 if (TARGET_S && TARGET_L)
373 error ("only one of -ms and -ml may be given");
374 if (TARGET_M && TARGET_L)
375 error ("only one of -mm and -ml may be given");
376 if (TARGET_S && global_options_set.x_mep_tiny_cutoff)
377 error ("only one of -ms and -mtiny= may be given");
378 if (TARGET_M && global_options_set.x_mep_tiny_cutoff)
379 error ("only one of -mm and -mtiny= may be given");
380 if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
381 warning (0, "-mclip currently has no effect without -mminmax");
383 if (mep_const_section)
385 if (strcmp (mep_const_section, "tiny") != 0
386 && strcmp (mep_const_section, "near") != 0
387 && strcmp (mep_const_section, "far") != 0)
388 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
391 if (TARGET_S)
392 mep_tiny_cutoff = 65536;
393 if (TARGET_M)
394 mep_tiny_cutoff = 0;
395 if (TARGET_L && ! global_options_set.x_mep_tiny_cutoff)
396 mep_tiny_cutoff = 0;
398 if (TARGET_64BIT_CR_REGS)
399 flag_split_wide_types = 0;
401 init_machine_status = mep_init_machine_status;
402 mep_init_intrinsics ();
405 /* Pattern Support - constraints, predicates, expanders. */
407 /* MEP has very few instructions that can refer to the span of
408 addresses used by symbols, so it's common to check for them. */
410 static bool
411 symbol_p (rtx x)
413 int c = GET_CODE (x);
415 return (c == CONST_INT
416 || c == CONST
417 || c == SYMBOL_REF);
420 static bool
421 symbolref_p (rtx x)
423 int c;
425 if (GET_CODE (x) != MEM)
426 return false;
428 c = GET_CODE (XEXP (x, 0));
429 return (c == CONST_INT
430 || c == CONST
431 || c == SYMBOL_REF);
434 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
436 #define GEN_REG(R, STRICT) \
437 (GR_REGNO_P (R) \
438 || (!STRICT \
439 && ((R) == ARG_POINTER_REGNUM \
440 || (R) >= FIRST_PSEUDO_REGISTER)))
442 static char pattern[12], *patternp;
443 static GTY(()) rtx patternr[12];
444 #define RTX_IS(x) (strcmp (pattern, x) == 0)
446 static void
447 encode_pattern_1 (rtx x)
449 int i;
451 if (patternp == pattern + sizeof (pattern) - 2)
453 patternp[-1] = '?';
454 return;
457 patternr[patternp-pattern] = x;
459 switch (GET_CODE (x))
461 case REG:
462 *patternp++ = 'r';
463 break;
464 case MEM:
465 *patternp++ = 'm';
466 case CONST:
467 encode_pattern_1 (XEXP(x, 0));
468 break;
469 case PLUS:
470 *patternp++ = '+';
471 encode_pattern_1 (XEXP(x, 0));
472 encode_pattern_1 (XEXP(x, 1));
473 break;
474 case LO_SUM:
475 *patternp++ = 'L';
476 encode_pattern_1 (XEXP(x, 0));
477 encode_pattern_1 (XEXP(x, 1));
478 break;
479 case HIGH:
480 *patternp++ = 'H';
481 encode_pattern_1 (XEXP(x, 0));
482 break;
483 case SYMBOL_REF:
484 *patternp++ = 's';
485 break;
486 case LABEL_REF:
487 *patternp++ = 'l';
488 break;
489 case CONST_INT:
490 case CONST_DOUBLE:
491 *patternp++ = 'i';
492 break;
493 case UNSPEC:
494 *patternp++ = 'u';
495 *patternp++ = '0' + XCINT(x, 1, UNSPEC);
496 for (i=0; i<XVECLEN (x, 0); i++)
497 encode_pattern_1 (XVECEXP (x, 0, i));
498 break;
499 case USE:
500 *patternp++ = 'U';
501 break;
502 default:
503 *patternp++ = '?';
504 #if 0
505 fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
506 debug_rtx (x);
507 gcc_unreachable ();
508 #endif
509 break;
513 static void
514 encode_pattern (rtx x)
516 patternp = pattern;
517 encode_pattern_1 (x);
518 *patternp = 0;
522 mep_section_tag (rtx x)
524 const char *name;
526 while (1)
528 switch (GET_CODE (x))
530 case MEM:
531 case CONST:
532 x = XEXP (x, 0);
533 break;
534 case UNSPEC:
535 x = XVECEXP (x, 0, 0);
536 break;
537 case PLUS:
538 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
539 return 0;
540 x = XEXP (x, 0);
541 break;
542 default:
543 goto done;
546 done:
547 if (GET_CODE (x) != SYMBOL_REF)
548 return 0;
549 name = XSTR (x, 0);
550 if (name[0] == '@' && name[2] == '.')
552 if (name[1] == 'i' || name[1] == 'I')
554 if (name[1] == 'I')
555 return 'f'; /* near */
556 return 'n'; /* far */
558 return name[1];
560 return 0;
564 mep_regno_reg_class (int regno)
566 switch (regno)
568 case SP_REGNO: return SP_REGS;
569 case TP_REGNO: return TP_REGS;
570 case GP_REGNO: return GP_REGS;
571 case 0: return R0_REGS;
572 case HI_REGNO: return HI_REGS;
573 case LO_REGNO: return LO_REGS;
574 case ARG_POINTER_REGNUM: return GENERAL_REGS;
577 if (GR_REGNO_P (regno))
578 return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
579 if (CONTROL_REGNO_P (regno))
580 return CONTROL_REGS;
582 if (CR_REGNO_P (regno))
584 int i, j;
586 /* Search for the register amongst user-defined subclasses of
587 the coprocessor registers. */
588 for (i = USER0_REGS; i <= USER3_REGS; ++i)
590 if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
591 continue;
592 for (j = 0; j < N_REG_CLASSES; ++j)
594 enum reg_class sub = reg_class_subclasses[i][j];
596 if (sub == LIM_REG_CLASSES)
597 return i;
598 if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
599 break;
603 return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
606 if (CCR_REGNO_P (regno))
607 return CCR_REGS;
609 gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
610 return NO_REGS;
613 static bool
614 const_in_range (rtx x, int minv, int maxv)
616 return (GET_CODE (x) == CONST_INT
617 && INTVAL (x) >= minv
618 && INTVAL (x) <= maxv);
621 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
622 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
623 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
624 at the end of the insn stream. */
627 mep_mulr_source (rtx_insn *insn, rtx dest, rtx src1, rtx src2)
629 if (rtx_equal_p (dest, src1))
630 return src2;
631 else if (rtx_equal_p (dest, src2))
632 return src1;
633 else
635 if (insn == 0)
636 emit_insn (gen_movsi (copy_rtx (dest), src1));
637 else
638 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
639 return src2;
643 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
644 Change the last element of PATTERN from (clobber (scratch:SI))
645 to (clobber (reg:SI HI_REGNO)). */
647 static void
648 mep_rewrite_mult (rtx_insn *insn, rtx pattern)
650 rtx hi_clobber;
652 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
653 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
654 PATTERN (insn) = pattern;
655 INSN_CODE (insn) = -1;
658 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
659 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
660 store the result in DEST if nonnull. */
662 static void
663 mep_rewrite_mulsi3 (rtx_insn *insn, rtx dest, rtx src1, rtx src2)
665 rtx lo, pattern;
667 lo = gen_rtx_REG (SImode, LO_REGNO);
668 if (dest)
669 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
670 mep_mulr_source (insn, dest, src1, src2));
671 else
672 pattern = gen_mulsi3_lo (lo, src1, src2);
673 mep_rewrite_mult (insn, pattern);
676 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
677 SRC3 into $lo, then use either madd or maddr. The move into $lo will
678 be deleted by a peephole2 if SRC3 is already in $lo. */
680 static void
681 mep_rewrite_maddsi3 (rtx_insn *insn, rtx dest, rtx src1, rtx src2, rtx src3)
683 rtx lo, pattern;
685 lo = gen_rtx_REG (SImode, LO_REGNO);
686 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
687 if (dest)
688 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
689 mep_mulr_source (insn, dest, src1, src2),
690 copy_rtx (lo));
691 else
692 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
693 mep_rewrite_mult (insn, pattern);
696 /* Return true if $lo has the same value as integer register GPR when
697 instruction INSN is reached. If necessary, rewrite the instruction
698 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
699 rtx for (reg:SI LO_REGNO).
701 This function is intended to be used by the peephole2 pass. Since
702 that pass goes from the end of a basic block to the beginning, and
703 propagates liveness information on the way, there is no need to
704 update register notes here.
706 If GPR_DEAD_P is true on entry, and this function returns true,
707 then the caller will replace _every_ use of GPR in and after INSN
708 with LO. This means that if the instruction that sets $lo is a
709 mulr- or maddr-type instruction, we can rewrite it to use mul or
710 madd instead. In combination with the copy progagation pass,
711 this allows us to replace sequences like:
713 mov GPR,R1
714 mulr GPR,R2
716 with:
718 mul R1,R2
720 if GPR is no longer used. */
722 static bool
723 mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx_insn *insn, bool gpr_dead_p)
727 insn = PREV_INSN (insn);
728 if (INSN_P (insn))
729 switch (recog_memoized (insn))
731 case CODE_FOR_mulsi3_1:
732 extract_insn (insn);
733 if (rtx_equal_p (recog_data.operand[0], gpr))
735 mep_rewrite_mulsi3 (insn,
736 gpr_dead_p ? NULL : recog_data.operand[0],
737 recog_data.operand[1],
738 recog_data.operand[2]);
739 return true;
741 return false;
743 case CODE_FOR_maddsi3:
744 extract_insn (insn);
745 if (rtx_equal_p (recog_data.operand[0], gpr))
747 mep_rewrite_maddsi3 (insn,
748 gpr_dead_p ? NULL : recog_data.operand[0],
749 recog_data.operand[1],
750 recog_data.operand[2],
751 recog_data.operand[3]);
752 return true;
754 return false;
756 case CODE_FOR_mulsi3r:
757 case CODE_FOR_maddsi3r:
758 extract_insn (insn);
759 return rtx_equal_p (recog_data.operand[1], gpr);
761 default:
762 if (reg_set_p (lo, insn)
763 || reg_set_p (gpr, insn)
764 || volatile_insn_p (PATTERN (insn)))
765 return false;
767 if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
768 gpr_dead_p = false;
769 break;
772 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
773 return false;
776 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
778 bool
779 mep_reuse_lo_p (rtx lo, rtx gpr, rtx_insn *insn, bool gpr_dead_p)
781 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
782 extract_insn (insn);
783 return result;
786 /* Return true if SET can be turned into a post-modify load or store
787 that adds OFFSET to GPR. In other words, return true if SET can be
788 changed into:
790 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
792 It's OK to change SET to an equivalent operation in order to
793 make it match. */
795 static bool
796 mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
798 rtx *reg, *mem;
799 unsigned int reg_bytes, mem_bytes;
800 enum machine_mode reg_mode, mem_mode;
802 /* Only simple SETs can be converted. */
803 if (GET_CODE (set) != SET)
804 return false;
806 /* Point REG to what we hope will be the register side of the set and
807 MEM to what we hope will be the memory side. */
808 if (GET_CODE (SET_DEST (set)) == MEM)
810 mem = &SET_DEST (set);
811 reg = &SET_SRC (set);
813 else
815 reg = &SET_DEST (set);
816 mem = &SET_SRC (set);
817 if (GET_CODE (*mem) == SIGN_EXTEND)
818 mem = &XEXP (*mem, 0);
821 /* Check that *REG is a suitable coprocessor register. */
822 if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
823 return false;
825 /* Check that *MEM is a suitable memory reference. */
826 if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
827 return false;
829 /* Get the number of bytes in each operand. */
830 mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
831 reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));
833 /* Check that OFFSET is suitably aligned. */
834 if (INTVAL (offset) & (mem_bytes - 1))
835 return false;
837 /* Convert *MEM to a normal integer mode. */
838 mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
839 *mem = change_address (*mem, mem_mode, NULL);
841 /* Adjust *REG as well. */
842 *reg = shallow_copy_rtx (*reg);
843 if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
845 /* SET is a subword load. Convert it to an explicit extension. */
846 PUT_MODE (*reg, SImode);
847 *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
849 else
851 reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
852 PUT_MODE (*reg, reg_mode);
854 return true;
857 /* Return the effect of frame-related instruction INSN. */
859 static rtx
860 mep_frame_expr (rtx_insn *insn)
862 rtx note, expr;
864 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
865 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
866 RTX_FRAME_RELATED_P (expr) = 1;
867 return expr;
870 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
871 new pattern in INSN1; INSN2 will be deleted by the caller. */
873 static void
874 mep_make_parallel (rtx_insn *insn1, rtx_insn *insn2)
876 rtx expr;
878 if (RTX_FRAME_RELATED_P (insn2))
880 expr = mep_frame_expr (insn2);
881 if (RTX_FRAME_RELATED_P (insn1))
882 expr = gen_rtx_SEQUENCE (VOIDmode,
883 gen_rtvec (2, mep_frame_expr (insn1), expr));
884 set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
885 RTX_FRAME_RELATED_P (insn1) = 1;
888 PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
889 gen_rtvec (2, PATTERN (insn1),
890 PATTERN (insn2)));
891 INSN_CODE (insn1) = -1;
894 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
895 the basic block to see if any previous load or store instruction can
896 be persuaded to do SET_INSN as a side-effect. Return true if so. */
898 static bool
899 mep_use_post_modify_p_1 (rtx_insn *set_insn, rtx reg, rtx offset)
901 rtx_insn *insn;
903 insn = set_insn;
906 insn = PREV_INSN (insn);
907 if (INSN_P (insn))
909 if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
911 mep_make_parallel (insn, set_insn);
912 return true;
915 if (reg_set_p (reg, insn)
916 || reg_referenced_p (reg, PATTERN (insn))
917 || volatile_insn_p (PATTERN (insn)))
918 return false;
921 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
922 return false;
925 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
927 bool
928 mep_use_post_modify_p (rtx_insn *insn, rtx reg, rtx offset)
930 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
931 extract_insn (insn);
932 return result;
935 bool
936 mep_allow_clip (rtx ux, rtx lx, int s)
938 HOST_WIDE_INT u = INTVAL (ux);
939 HOST_WIDE_INT l = INTVAL (lx);
940 int i;
942 if (!TARGET_OPT_CLIP)
943 return false;
945 if (s)
947 for (i = 0; i < 30; i ++)
948 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
949 && (l == - ((HOST_WIDE_INT) 1 << i)))
950 return true;
952 else
954 if (l != 0)
955 return false;
957 for (i = 0; i < 30; i ++)
958 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
959 return true;
961 return false;
964 bool
965 mep_bit_position_p (rtx x, bool looking_for)
967 if (GET_CODE (x) != CONST_INT)
968 return false;
969 switch ((int) INTVAL(x) & 0xff)
971 case 0x01: case 0x02: case 0x04: case 0x08:
972 case 0x10: case 0x20: case 0x40: case 0x80:
973 return looking_for;
974 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
975 case 0xef: case 0xdf: case 0xbf: case 0x7f:
976 return !looking_for;
978 return false;
981 static bool
982 move_needs_splitting (rtx dest, rtx src,
983 enum machine_mode mode ATTRIBUTE_UNUSED)
985 int s = mep_section_tag (src);
987 while (1)
989 if (GET_CODE (src) == CONST
990 || GET_CODE (src) == MEM)
991 src = XEXP (src, 0);
992 else if (GET_CODE (src) == SYMBOL_REF
993 || GET_CODE (src) == LABEL_REF
994 || GET_CODE (src) == PLUS)
995 break;
996 else
997 return false;
999 if (s == 'f'
1000 || (GET_CODE (src) == PLUS
1001 && GET_CODE (XEXP (src, 1)) == CONST_INT
1002 && (INTVAL (XEXP (src, 1)) < -65536
1003 || INTVAL (XEXP (src, 1)) > 0xffffff))
1004 || (GET_CODE (dest) == REG
1005 && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
1006 return true;
1007 return false;
1010 bool
1011 mep_split_mov (rtx *operands, int symbolic)
1013 if (symbolic)
1015 if (move_needs_splitting (operands[0], operands[1], SImode))
1016 return true;
1017 return false;
1020 if (GET_CODE (operands[1]) != CONST_INT)
1021 return false;
1023 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1024 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1025 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1026 return false;
1028 if (((!reload_completed && !reload_in_progress)
1029 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1030 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1031 return false;
1033 return true;
1036 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1037 it to one specific value. So the insn chosen depends on whether
1038 the source and destination modes match. */
1040 bool
1041 mep_vliw_mode_match (rtx tgt)
1043 bool src_vliw = mep_vliw_function_p (cfun->decl);
1044 bool tgt_vliw = INTVAL (tgt);
1046 return src_vliw == tgt_vliw;
1049 /* Like the above, but also test for near/far mismatches. */
1051 bool
1052 mep_vliw_jmp_match (rtx tgt)
1054 bool src_vliw = mep_vliw_function_p (cfun->decl);
1055 bool tgt_vliw = INTVAL (tgt);
1057 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1058 return false;
1060 return src_vliw == tgt_vliw;
1063 bool
1064 mep_multi_slot (rtx x)
1066 return get_attr_slot (x) == SLOT_MULTI;
1069 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1071 static bool
1072 mep_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1074 /* We can't convert symbol values to gp- or tp-rel values after
1075 reload, as reload might have used $gp or $tp for other
1076 purposes. */
1077 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1079 char e = mep_section_tag (x);
1080 return (e != 't' && e != 'b');
1082 return 1;
1085 /* Be careful not to use macros that need to be compiled one way for
1086 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
1088 bool
1089 mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
1091 int the_tag;
1093 #define DEBUG_LEGIT 0
1094 #if DEBUG_LEGIT
1095 fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
1096 debug_rtx (x);
1097 #endif
1099 if (GET_CODE (x) == LO_SUM
1100 && GET_CODE (XEXP (x, 0)) == REG
1101 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1102 && CONSTANT_P (XEXP (x, 1)))
1104 if (GET_MODE_SIZE (mode) > 4)
1106 /* We will end up splitting this, and lo_sums are not
1107 offsettable for us. */
1108 #if DEBUG_LEGIT
1109 fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
1110 #endif
1111 return false;
1113 #if DEBUG_LEGIT
1114 fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
1115 #endif
1116 return true;
1119 if (GET_CODE (x) == REG
1120 && GEN_REG (REGNO (x), strict))
1122 #if DEBUG_LEGIT
1123 fprintf (stderr, " - yup, [reg]\n");
1124 #endif
1125 return true;
1128 if (GET_CODE (x) == PLUS
1129 && GET_CODE (XEXP (x, 0)) == REG
1130 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1131 && const_in_range (XEXP (x, 1), -32768, 32767))
1133 #if DEBUG_LEGIT
1134 fprintf (stderr, " - yup, [reg+const]\n");
1135 #endif
1136 return true;
1139 if (GET_CODE (x) == PLUS
1140 && GET_CODE (XEXP (x, 0)) == REG
1141 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1142 && GET_CODE (XEXP (x, 1)) == CONST
1143 && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
1144 || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
1145 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
1146 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
1148 #if DEBUG_LEGIT
1149 fprintf (stderr, " - yup, [reg+unspec]\n");
1150 #endif
1151 return true;
1154 the_tag = mep_section_tag (x);
1156 if (the_tag == 'f')
1158 #if DEBUG_LEGIT
1159 fprintf (stderr, " - nope, [far]\n");
1160 #endif
1161 return false;
1164 if (mode == VOIDmode
1165 && GET_CODE (x) == SYMBOL_REF)
1167 #if DEBUG_LEGIT
1168 fprintf (stderr, " - yup, call [symbol]\n");
1169 #endif
1170 return true;
1173 if ((mode == SImode || mode == SFmode)
1174 && CONSTANT_P (x)
1175 && mep_legitimate_constant_p (mode, x)
1176 && the_tag != 't' && the_tag != 'b')
1178 if (GET_CODE (x) != CONST_INT
1179 || (INTVAL (x) <= 0xfffff
1180 && INTVAL (x) >= 0
1181 && (INTVAL (x) % 4) == 0))
1183 #if DEBUG_LEGIT
1184 fprintf (stderr, " - yup, [const]\n");
1185 #endif
1186 return true;
1190 #if DEBUG_LEGIT
1191 fprintf (stderr, " - nope.\n");
1192 #endif
1193 return false;
/* Implement LEGITIMIZE_RELOAD_ADDRESS.  Try to fix up *X, an address
   being reloaded for operand OPNUM with reload type TYPE_I, by pushing
   a suitable reload ourselves.  Return 1 if we pushed a reload, 0 to
   let the generic reload machinery handle the address.  */
int
mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
			       int type_i,
			       int ind_levels ATTRIBUTE_UNUSED)
{
  enum reload_type type = (enum reload_type) type_i;

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == MEM
      && GET_CODE (XEXP (*x, 1)) == REG)
    {
      /* GCC will by default copy the MEM into a REG, which results in
	 an invalid address.  For us, the best thing to do is move the
	 whole expression to a REG.  */
      push_reload (*x, NULL_RTX, x, NULL,
		   GENERAL_REGS, mode, VOIDmode,
		   0, 0, opnum, type);
      return 1;
    }

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
      && GET_CODE (XEXP (*x, 1)) == CONST_INT)
    {
      /* Tag 't'/'b' symbols (tiny/based sections) are handled
	 elsewhere; anything else needs the symbol in a register.  */
      char e = mep_section_tag (XEXP (*x, 0));

      if (e != 't' && e != 'b')
	{
	  /* GCC thinks that (sym+const) is a valid address.  Well,
	     sometimes it is, this time it isn't.  The best thing to
	     do is reload the symbol to a register, since reg+int
	     tends to work, and we can't just add the symbol and
	     constant anyway.  */
	  push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
		       GENERAL_REGS, mode, VOIDmode,
		       0, 0, opnum, type);
	  return 1;
	}
    }
  return 0;
}
/* Return the encoded length in bytes (2 or 4) of the core load/store
   INSN whose memory operand is operand OPN of its single_set.  Short
   forms exist for plain [reg], word-sized [sp + imm7a4], and small
   $tp-relative accesses whose other operand is a low register.  */
static int
mep_core_address_length (rtx_insn *insn, int opn)
{
  rtx set = single_set (insn);
  rtx mem = XEXP (set, opn);
  rtx other = XEXP (set, 1-opn);
  rtx addr = XEXP (mem, 0);

  if (register_operand (addr, Pmode))
    return 2;
  if (GET_CODE (addr) == PLUS)
    {
      rtx addend = XEXP (addr, 1);

      gcc_assert (REG_P (XEXP (addr, 0)));

      switch (REGNO (XEXP (addr, 0)))
	{
	case STACK_POINTER_REGNUM:
	  /* SP-relative word access with a 7-bit, 4-aligned offset
	     has a 16-bit encoding.  */
	  if (GET_MODE_SIZE (GET_MODE (mem)) == 4
	      && mep_imm7a4_operand (addend, VOIDmode))
	    return 2;
	  break;

	case 13: /* TP */
	  gcc_assert (REG_P (other));

	  /* The short TP-relative form only reaches registers 0-7.  */
	  if (REGNO (other) >= 8)
	    break;

	  if (GET_CODE (addend) == CONST
	      && GET_CODE (XEXP (addend, 0)) == UNSPEC
	      && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
	    return 2;

	  /* Short form takes a 7-bit offset aligned to the access size.  */
	  if (GET_CODE (addend) == CONST_INT
	      && INTVAL (addend) >= 0
	      && INTVAL (addend) <= 127
	      && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
	    return 2;
	  break;
	}
    }

  return 4;
}
1286 mep_cop_address_length (rtx_insn *insn, int opn)
1288 rtx set = single_set (insn);
1289 rtx mem = XEXP (set, opn);
1290 rtx addr = XEXP (mem, 0);
1292 if (GET_CODE (mem) != MEM)
1293 return 2;
1294 if (register_operand (addr, Pmode))
1295 return 2;
1296 if (GET_CODE (addr) == POST_INC)
1297 return 2;
1299 return 4;
#define DEBUG_EXPAND_MOV 0
/* Expand a move of MODE between operands[0] and operands[1].  Rewrites
   tiny/based-section ('t'/'b') symbol references into $gp/$tp-relative
   UNSPEC forms, splits far-section ('f') accesses through a scratch
   address register, and expands other symbolic loads via the
   movsi_topsym/botsym pair.  Returns true if this function emitted the
   whole move itself, false if the caller should emit the normal SET.  */
bool
mep_expand_mov (rtx *operands, enum machine_mode mode)
{
  int i, t;
  int tag[2];
  rtx tpsym, tpoffs;
  int post_reload = 0;

  tag[0] = mep_section_tag (operands[0]);
  tag[1] = mep_section_tag (operands[1]);

  /* Before reload, force mem-to-mem moves through a register.  */
  if (!reload_in_progress
      && !reload_completed
      && GET_CODE (operands[0]) != REG
      && GET_CODE (operands[0]) != SUBREG
      && GET_CODE (operands[1]) != REG
      && GET_CODE (operands[1]) != SUBREG)
    operands[1] = copy_to_mode_reg (mode, operands[1]);

#if DEBUG_EXPAND_MOV
  fprintf(stderr, "expand move %s %d\n", mode_name[mode],
	  reload_in_progress || reload_completed);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  if (mode == DImode || mode == DFmode)
    return false;

  if (reload_in_progress || reload_completed)
    {
      rtx r;

      if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
	cfun->machine->reload_changes_tp = true;

      /* The gp/tp-relative rewrites below need the "initial" value of
	 $gp/$tp in a pseudo; if that is not available any more,
	 remember that we must use the plain symbolic form instead.  */
      if (tag[0] == 't' || tag[1] == 't')
	{
	  r = has_hard_reg_initial_val (Pmode, GP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
	    post_reload = 1;
	}
      if (tag[0] == 'b' || tag[1] == 'b')
	{
	  r = has_hard_reg_initial_val (Pmode, TP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
	    post_reload = 1;
	}
      if (cfun->machine->reload_changes_tp == true)
	post_reload = 1;
    }

  if (!post_reload)
    {
      rtx n;
      if (symbol_p (operands[1]))
	{
	  t = mep_section_tag (operands[1]);
	  if (t == 'b' || t == 't')
	    {
	      /* Rewrite a tiny/based symbol (possibly plus offset) as
		 base-register + UNSPEC and emit the move directly.  */
	      if (GET_CODE (operands[1]) == SYMBOL_REF)
		{
		  tpsym = operands[1];
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, operands[1]),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == PLUS
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
		{
		  tpsym = XEXP (XEXP (operands[1], 0), 0);
		  tpoffs = XEXP (XEXP (operands[1], 0), 1);
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, tpsym),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_PLUS (mode, n, tpoffs);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
		/* Already in the relocated form; nothing to do.  */
		return false;
	      else
		{
		  error ("unusual TP-relative address");
		  return false;
		}

	      n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
				       : mep_gp_rtx ()), n);
	      n = emit_insn (gen_rtx_SET (mode, operands[0], n));
#if DEBUG_EXPAND_MOV
	      fprintf(stderr, "mep_expand_mov emitting ");
	      debug_rtx(n);
#endif
	      return true;
	    }
	}

      /* Rewrite tiny/based MEM operands into base-register-relative
	 addressing so the normal move patterns match.  */
      for (i=0; i < 2; i++)
	{
	  t = mep_section_tag (operands[i]);
	  if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
	    {
	      rtx sym, n, r;
	      int u;

	      sym = XEXP (operands[i], 0);
	      if (GET_CODE (sym) == CONST
		  && GET_CODE (XEXP (sym, 0)) == UNSPEC)
		sym = XVECEXP (XEXP (sym, 0), 0, 0);

	      if (t == 'b')
		{
		  r = mep_tp_rtx ();
		  u = UNS_TPREL;
		}
	      else
		{
		  r = mep_gp_rtx ();
		  u = UNS_GPREL;
		}

	      n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
	      n = gen_rtx_CONST (Pmode, n);
	      n = gen_rtx_PLUS (Pmode, r, n);
	      operands[i] = replace_equiv_address (operands[i], n);
	    }
	}
    }

  /* Control registers can only be moved to/from general registers;
     anything else must go through a GPR temporary.  */
  if ((GET_CODE (operands[1]) != REG
       && MEP_CONTROL_REG (operands[0]))
      || (GET_CODE (operands[0]) != REG
	  && MEP_CONTROL_REG (operands[1])))
    {
      rtx temp;
#if DEBUG_EXPAND_MOV
      fprintf (stderr, "cr-mem, forcing op1 to reg\n");
#endif
      temp = gen_reg_rtx (mode);
      emit_move_insn (temp, operands[1]);
      operands[1] = temp;
    }

  /* Stores to far symbols (or of odd sizes) need the address in a
     register first.  */
  if (symbolref_p (operands[0])
      && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
	  || (GET_MODE_SIZE (mode) != 4)))
    {
      rtx temp;

      gcc_assert (!reload_in_progress && !reload_completed);

      temp = force_reg (Pmode, XEXP (operands[0], 0));
      operands[0] = replace_equiv_address (operands[0], temp);
      emit_move_insn (operands[0], operands[1]);
      return true;
    }

  if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
    tag[1] = 0;

  /* Load a symbolic constant via its top/bottom halves.  */
  if (symbol_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
      emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
      return true;
    }

  /* Load from a symbolic MEM: build the address in a temporary
     (reusing operands[0] during/after reload, when no new pseudo may
     be created), then load through it.  */
  if (symbolref_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      rtx temp;

      if (reload_in_progress || reload_completed)
	temp = operands[0];
      else
	temp = gen_reg_rtx (Pmode);

      emit_insn (gen_movsi_topsym_s (temp, operands[1]));
      emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
      emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
      return true;
    }

  return false;
}
/* Cases where the pattern can't be made to use at all.  */

/* Predicate for the mov expanders: return true if the operand pair is
   something the move patterns can handle directly.  */
bool
mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int i;

#define DEBUG_MOV_OK 0
#if DEBUG_MOV_OK
  fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
	   mep_section_tag (operands[1]));
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* We want the movh patterns to get these.  */
  if (GET_CODE (operands[1]) == HIGH)
    return false;

  /* We can't store a register to a far variable without using a
     scratch register to hold the address.  Using far variables should
     be split by mep_emit_mov anyway.  */
  if (mep_section_tag (operands[0]) == 'f'
      || mep_section_tag (operands[1]) == 'f')
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, f\n");
#endif
      return false;
    }
  i = mep_section_tag (operands[1]);
  if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
    /* These are supposed to be generated with adds of the appropriate
       register.  During and after reload, however, we allow them to
       be accessed as normal symbols because adding a dependency on
       the base register now might cause problems.  */
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, bt\n");
#endif
      return false;
    }

  /* The only moves we can allow involve at least one general
     register, so require it.  */
  for (i = 0; i < 2; i ++)
    {
      /* Allow subregs too, before reload.  */
      rtx x = operands[i];

      if (GET_CODE (x) == SUBREG)
	x = XEXP (x, 0);
      if (GET_CODE (x) == REG
	  && ! MEP_CONTROL_REG (x))
	{
#if DEBUG_MOV_OK
	  fprintf (stderr, " - ok\n");
#endif
	  return true;
	}
    }

#if DEBUG_MOV_OK
  fprintf (stderr, " - no, no gen reg\n");
#endif
  return false;
}
#define DEBUG_SPLIT_WIDE_MOVE 0
/* Split a doubleword move into two word moves.  On entry operands[0]
   and operands[1] are the destination/source; on exit operands[2..3]
   hold the high halves and operands[4..5] the low halves, swapped if
   needed so that no half clobbers a source still to be read.  */
void
mep_split_wide_move (rtx *operands, enum machine_mode mode)
{
  int i;

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  for (i = 0; i <= 1; i++)
    {
      rtx op = operands[i], hi, lo;

      switch (GET_CODE (op))
	{
	case REG:
	  {
	    unsigned int regno = REGNO (op);

	    if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
	      {
		rtx i32;

		/* A 64-bit coprocessor register: the low word is the
		   SImode view, the high word bits 32..63 via
		   zero_extract.  */
		lo = gen_rtx_REG (SImode, regno);
		i32 = GEN_INT (32);
		hi = gen_rtx_ZERO_EXTRACT (SImode,
					   gen_rtx_REG (DImode, regno),
					   i32, i32);
	      }
	    else
	      {
		/* A register pair; endianness decides which regno
		   holds the high word.  */
		hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
		lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
	      }
	  }
	  break;

	case CONST_INT:
	case CONST_DOUBLE:
	case MEM:
	  hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
	  lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* The high part of CR <- GPR moves must be done after the low part.  */
      operands [i + 4] = lo;
      operands [i + 2] = hi;
    }

  if (reg_mentioned_p (operands[2], operands[5])
      || GET_CODE (operands[2]) == ZERO_EXTRACT
      || GET_CODE (operands[4]) == ZERO_EXTRACT)
    {
      rtx tmp;

      /* Overlapping register pairs -- make sure we don't
	 early-clobber ourselves.  */
      tmp = operands[2];
      operands[2] = operands[4];
      operands[4] = tmp;
      tmp = operands[3];
      operands[3] = operands[5];
      operands[5] = tmp;
    }

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf(stderr, "\033[34m");
  debug_rtx (operands[2]);
  debug_rtx (operands[3]);
  debug_rtx (operands[4]);
  debug_rtx (operands[5]);
  fprintf(stderr, "\033[0m");
#endif
}
1644 /* Emit a setcc instruction in its entirity. */
1646 static bool
1647 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1649 rtx tmp;
1651 switch (code)
1653 case GT:
1654 case GTU:
1655 tmp = op1, op1 = op2, op2 = tmp;
1656 code = swap_condition (code);
1657 /* FALLTHRU */
1659 case LT:
1660 case LTU:
1661 op1 = force_reg (SImode, op1);
1662 emit_insn (gen_rtx_SET (VOIDmode, dest,
1663 gen_rtx_fmt_ee (code, SImode, op1, op2)));
1664 return true;
1666 case EQ:
1667 if (op2 != const0_rtx)
1668 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1669 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1670 return true;
1672 case NE:
1673 /* Branchful sequence:
1674 mov dest, 0 16-bit
1675 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1676 mov dest, 1 16-bit
1678 Branchless sequence:
1679 add3 tmp, op1, -op2 32-bit (or mov + sub)
1680 sltu3 tmp, tmp, 1 16-bit
1681 xor3 dest, tmp, 1 32-bit
1683 if (optimize_size && op2 != const0_rtx)
1684 return false;
1686 if (op2 != const0_rtx)
1687 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1689 op2 = gen_reg_rtx (SImode);
1690 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1692 emit_insn (gen_rtx_SET (VOIDmode, dest,
1693 gen_rtx_XOR (SImode, op2, const1_rtx)));
1694 return true;
1696 case LE:
1697 if (GET_CODE (op2) != CONST_INT
1698 || INTVAL (op2) == 0x7ffffff)
1699 return false;
1700 op2 = GEN_INT (INTVAL (op2) + 1);
1701 return mep_expand_setcc_1 (LT, dest, op1, op2);
1703 case LEU:
1704 if (GET_CODE (op2) != CONST_INT
1705 || INTVAL (op2) == -1)
1706 return false;
1707 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1708 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1710 case GE:
1711 if (GET_CODE (op2) != CONST_INT
1712 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1713 return false;
1714 op2 = GEN_INT (INTVAL (op2) - 1);
1715 return mep_expand_setcc_1 (GT, dest, op1, op2);
1717 case GEU:
1718 if (GET_CODE (op2) != CONST_INT
1719 || op2 == const0_rtx)
1720 return false;
1721 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1722 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1724 default:
1725 gcc_unreachable ();
1729 bool
1730 mep_expand_setcc (rtx *operands)
1732 rtx dest = operands[0];
1733 enum rtx_code code = GET_CODE (operands[1]);
1734 rtx op0 = operands[2];
1735 rtx op1 = operands[3];
1737 return mep_expand_setcc_1 (code, dest, op0, op1);
/* Canonicalize the comparison in operands[0] (between operands[1] and
   operands[2]) into a form the conditional-branch patterns accept
   (LT/GE with a 4-bit immediate, or EQ/NE), emitting setcc insns for
   the rest.  Return the comparison rtx to branch on.  */
rtx
mep_expand_cbranch (rtx *operands)
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx tmp;

 restart:
  switch (code)
    {
    case LT:
      if (mep_imm4_operand (op1, SImode))
	break;

      /* Compute the LT result into a register and branch on it.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GE:
      if (mep_imm4_operand (op1, SImode))
	break;

      /* GE is the complement of LT: compute LT and branch on EQ 0.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));

      code = EQ;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case EQ:
    case NE:
      if (! mep_reg_or_imm4_operand (op1, SImode))
	op1 = force_reg (SImode, op1);
      break;

    case LE:
    case GT:
      /* Adjust the constant by one to turn LE/GT into LT/GE, unless
	 that would overflow INT_MAX.  */
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) != 0x7fffffff)
	{
	  op1 = GEN_INT (INTVAL (op1) + 1);
	  code = (code == LE ? LT : GE);
	  goto restart;
	}

      /* Otherwise compute the swapped LT and branch on its result.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));

      code = (code == LE ? EQ : NE);
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LTU:
      /* x <u 1 is simply x == 0.  */
      if (op1 == const1_rtx)
	{
	  code = EQ;
	  op1 = const0_rtx;
	  break;
	}

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LEU:
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GTU:
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
		  || mep_expand_setcc_1 (LTU, tmp, op1, op0));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GEU:
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    default:
      gcc_unreachable ();
    }

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
1853 const char *
1854 mep_emit_cbranch (rtx *operands, int ne)
1856 if (GET_CODE (operands[1]) == REG)
1857 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1858 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
1859 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1860 else
1861 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
/* Expand a call pattern.  operands[RETURNS_VALUE] is the MEM naming
   the callee (operands[0] holds the return value when RETURNS_VALUE is
   nonzero).  The emitted call insn carries $tp and $gp so the initial
   values stay live across the call.  */
void
mep_expand_call (rtx *operands, int returns_value)
{
  rtx addr = operands[returns_value];
  rtx tp = mep_tp_rtx ();
  rtx gp = mep_gp_rtx ();

  gcc_assert (GET_CODE (addr) == MEM);

  addr = XEXP (addr, 0);

  if (! mep_call_address_operand (addr, VOIDmode))
    addr = force_reg (SImode, addr);

  /* The pattern requires a (possibly zero) third operand.  */
  if (! operands[returns_value+2])
    operands[returns_value+2] = const0_rtx;

  if (returns_value)
    emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
					     operands[3], tp, gp));
  else
    emit_call_insn (gen_call_internal (addr, operands[1],
				       operands[2], tp, gp));
}
/* Aliasing Support.  */

/* If X is a machine specific address (i.e. a symbol or label being
   referenced as a displacement from the GOT implemented using an
   UNSPEC), then return the base term.  Otherwise return X.  */

rtx
mep_find_base_term (rtx x)
{
  rtx base, term;
  int unspec;

  if (GET_CODE (x) != PLUS)
    return x;
  base = XEXP (x, 0);
  term = XEXP (x, 1);

  /* Only $tp- and $gp-relative forms are recognized, and only when the
     corresponding initial-value pseudo is the base.  */
  if (has_hard_reg_initial_val(Pmode, TP_REGNO)
      && base == mep_tp_rtx ())
    unspec = UNS_TPREL;
  else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
	   && base == mep_gp_rtx ())
    unspec = UNS_GPREL;
  else
    return x;

  if (GET_CODE (term) != CONST)
    return x;
  term = XEXP (term, 0);

  if (GET_CODE (term) != UNSPEC
      || XINT (term, 1) != unspec)
    return x;

  /* The symbol wrapped by the UNSPEC is the base term.  */
  return XVECEXP (term, 0, 0);
}
1926 /* Reload Support. */
1928 /* Return true if the registers in CLASS cannot represent the change from
1929 modes FROM to TO. */
1931 bool
1932 mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
1933 enum reg_class regclass)
1935 if (from == to)
1936 return false;
1938 /* 64-bit COP regs must remain 64-bit COP regs. */
1939 if (TARGET_64BIT_CR_REGS
1940 && (regclass == CR_REGS
1941 || regclass == LOADABLE_CR_REGS)
1942 && (GET_MODE_SIZE (to) < 8
1943 || GET_MODE_SIZE (from) < 8))
1944 return true;
1946 return false;
1949 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
1951 static bool
1952 mep_general_reg (rtx x)
1954 while (GET_CODE (x) == SUBREG)
1955 x = XEXP (x, 0);
1956 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
1959 static bool
1960 mep_nongeneral_reg (rtx x)
1962 while (GET_CODE (x) == SUBREG)
1963 x = XEXP (x, 0);
1964 return (GET_CODE (x) == REG
1965 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
1968 static bool
1969 mep_general_copro_reg (rtx x)
1971 while (GET_CODE (x) == SUBREG)
1972 x = XEXP (x, 0);
1973 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
1976 static bool
1977 mep_nonregister (rtx x)
1979 while (GET_CODE (x) == SUBREG)
1980 x = XEXP (x, 0);
1981 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
#define DEBUG_RELOAD 0

/* Return the secondary reload class needed for moving value X to or
   from a register in coprocessor register class CLASS.  */

static enum reg_class
mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
{
  if (mep_general_reg (x))
    /* We can do the move directly if mep_have_core_copro_moves_p,
       otherwise we need to go through memory.  Either way, no secondary
       register is needed.  */
    return NO_REGS;

  if (mep_general_copro_reg (x))
    {
      /* We can do the move directly if mep_have_copro_copro_moves_p.  */
      if (mep_have_copro_copro_moves_p)
	return NO_REGS;

      /* Otherwise we can use a temporary if mep_have_core_copro_moves_p.  */
      if (mep_have_core_copro_moves_p)
	return GENERAL_REGS;

      /* Otherwise we need to do it through memory.  No secondary
	 register is needed.  */
      return NO_REGS;
    }

  if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
      && constraint_satisfied_p (x, CONSTRAINT_U))
    /* X is a memory value that we can access directly.  */
    return NO_REGS;

  /* We have to move X into a GPR first and then copy it to
     the coprocessor register.  The move from the GPR to the
     coprocessor might be done directly or through memory,
     depending on mep_have_core_copro_moves_p. */
  return GENERAL_REGS;
}
/* Copying X to register in RCLASS.  */

/* Implement SECONDARY_INPUT_RELOAD_CLASS: return the class of the
   intermediate register (or NO_REGS) needed to copy X into RCLASS.  */
enum reg_class
mep_secondary_input_reload_class (enum reg_class rclass,
				  enum machine_mode mode ATTRIBUTE_UNUSED,
				  rtx x)
{
  int rv = NO_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
  debug_rtx (x);
#endif

  if (reg_class_subset_p (rclass, CR_REGS))
    rv = mep_secondary_copro_reload_class (rclass, x);
  else if (MEP_NONGENERAL_CLASS (rclass)
	   && (mep_nonregister (x) || mep_nongeneral_reg (x)))
    /* Non-general destinations can only be reached from a GPR.  */
    rv = GENERAL_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
#endif
  return (enum reg_class) rv;
}
/* Copying register in RCLASS to X.  */

/* Implement SECONDARY_OUTPUT_RELOAD_CLASS: mirror image of
   mep_secondary_input_reload_class for copies out of RCLASS.  */
enum reg_class
mep_secondary_output_reload_class (enum reg_class rclass,
				   enum machine_mode mode ATTRIBUTE_UNUSED,
				   rtx x)
{
  int rv = NO_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
  debug_rtx (x);
#endif

  if (reg_class_subset_p (rclass, CR_REGS))
    rv = mep_secondary_copro_reload_class (rclass, x);
  else if (MEP_NONGENERAL_CLASS (rclass)
	   && (mep_nonregister (x) || mep_nongeneral_reg (x)))
    /* Non-general sources can only be stored via a GPR.  */
    rv = GENERAL_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
#endif

  return (enum reg_class) rv;
}
/* Implement SECONDARY_MEMORY_NEEDED.  */

/* A stack temporary is needed for CR<->GPR moves when direct
   core/copro moves are unavailable, and for CR<->CR moves when
   neither direct nor copro-copro moves exist.  */
bool
mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
			     enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (!mep_have_core_copro_moves_p)
    {
      if (reg_classes_intersect_p (rclass1, CR_REGS)
	  && reg_classes_intersect_p (rclass2, GENERAL_REGS))
	return true;
      if (reg_classes_intersect_p (rclass2, CR_REGS)
	  && reg_classes_intersect_p (rclass1, GENERAL_REGS))
	return true;
      if (!mep_have_copro_copro_moves_p
	  && reg_classes_intersect_p (rclass1, CR_REGS)
	  && reg_classes_intersect_p (rclass2, CR_REGS))
	return true;
    }
  return false;
}
/* Expand a secondary reload: operands[0] is the destination,
   operands[1] the source, operands[2] the scratch register.  WHICH
   encodes the case as two decimal digits: tens digit describes the
   destination (2 = far symbol, 1 = control register, 0 = other), ones
   digit likewise describes the source.  NOTE(review): the 00/01/02
   case labels carry leading zeros and are therefore octal literals,
   but their values are below 8 so they equal the intended decimal
   values.  */
void
mep_expand_reload (rtx *operands, enum machine_mode mode)
{
  /* There are three cases for each direction:
     register, farsym
     control, farsym
     control, nearsym */

  int s0 = mep_section_tag (operands[0]) == 'f';
  int s1 = mep_section_tag (operands[1]) == 'f';
  int c0 = mep_nongeneral_reg (operands[0]);
  int c1 = mep_nongeneral_reg (operands[1]);
  int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);

#if DEBUG_RELOAD
  fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  switch (which)
    {
    case 00: /* Don't know why this gets here.  */
    case 02: /* general = far */
      emit_move_insn (operands[0], operands[1]);
      return;

    case 10: /* cr = mem */
    case 11: /* cr = cr */
    case 01: /* mem = cr */
    case 12: /* cr = far */
      /* Route the value through the general-register scratch.  */
      emit_move_insn (operands[2], operands[1]);
      emit_move_insn (operands[0], operands[2]);
      return;

    case 20: /* far = general */
      /* Build the far address in the scratch, then store through it.  */
      emit_move_insn (operands[2], XEXP (operands[1], 0));
      emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
      return;

    case 21: /* far = cr */
    case 22: /* far = far */
    default:
      fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
	       which, mode_name[mode]);
      debug_rtx (operands[0]);
      debug_rtx (operands[1]);
      gcc_unreachable ();
    }
}
/* Implement PREFERRED_RELOAD_CLASS.  See whether X is a constant that
   can be moved directly into registers 0 to 7, but not into the rest.
   If so, and if the required class includes registers 0 to 7, restrict
   it to those registers.  */

enum reg_class
mep_preferred_reload_class (rtx x, enum reg_class rclass)
{
  switch (GET_CODE (x))
    {
    case CONST_INT:
      /* Mid-range constants that are not simple 16-bit values can be
	 built with movu in r0-r7 only.  */
      if (INTVAL (x) >= 0x10000
	  && INTVAL (x) < 0x01000000
	  && (INTVAL (x) & 0xffff) != 0
	  && reg_class_subset_p (TPREL_REGS, rclass))
	rclass = TPREL_REGS;
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Near symbols likewise; far ('f') symbols cannot use this.  */
      if (mep_section_tag (x) != 'f'
	  && reg_class_subset_p (TPREL_REGS, rclass))
	rclass = TPREL_REGS;
      break;

    default:
      break;
    }
  return rclass;
}
/* Implement REGISTER_MOVE_COST.  Return 2 for direct single-register
   moves, 4 for direct double-register moves, and 1000 for anything
   that requires a temporary register or temporary stack slot.  */

int
mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
{
  /* CR->CR when direct copro-copro moves exist.  */
  if (mep_have_copro_copro_moves_p
      && reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 4;
      return 2;
    }
  /* CR->CR otherwise: costs a pair of core<->copro moves.  */
  if (reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 8;
      return 4;
    }
  /* One side is a coprocessor register.  */
  if (reg_class_subset_p (from, CR_REGS)
      || reg_class_subset_p (to, CR_REGS))
    {
      if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 4;
      return 2;
    }
  if (mep_secondary_memory_needed (from, to, mode))
    return 1000;
  if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
    return 1000;

  if (GET_MODE_SIZE (mode) > 4)
    return 4;

  return 2;
}
/* Functions to save and restore machine-specific function data.  */

/* Allocate a zero-initialized machine_function for init_machine_status.  */
static struct machine_function *
mep_init_machine_status (void)
{
  return ggc_cleared_alloc<machine_function> ();
}
/* Implement TARGET_ALLOCATE_INITIAL_VALUE: return a stack slot (an
   arg-pointer-relative MEM) in which the entry value of hard register
   REG is saved, allocating one on first use; NULL_RTX if the register
   gets no such slot.  */
static rtx
mep_allocate_initial_value (rtx reg)
{
  int rss;

  if (GET_CODE (reg) != REG)
    return NULL_RTX;

  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    return NULL_RTX;

  /* In interrupt functions, the "initial" values of $gp and $tp are
     provided by the prologue.  They are not necessarily the same as
     the values that the caller was using.  */
  if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
    if (mep_interrupt_p ())
      return NULL_RTX;

  /* Lazily assign the next 4-byte slot in the register-save area.  */
  if (! cfun->machine->reg_save_slot[REGNO(reg)])
    {
      cfun->machine->reg_save_size += 4;
      cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
    }

  rss = cfun->machine->reg_save_slot[REGNO(reg)];
  return gen_rtx_MEM (SImode, plus_constant (Pmode, arg_pointer_rtx, -rss));
}
2261 mep_return_addr_rtx (int count)
2263 if (count != 0)
2264 return const0_rtx;
2266 return get_hard_reg_initial_val (Pmode, LP_REGNO);
/* Return a pseudo holding the incoming value of $tp.  */
static rtx
mep_tp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, TP_REGNO);
}
/* Return a pseudo holding the incoming value of $gp.  */
static rtx
mep_gp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, GP_REGNO);
}
/* Return true if the current function has the "interrupt" attribute.
   The result is cached in cfun->machine->interrupt_handler:
   0 = not yet computed, 1 = no, 2 = yes.  */
static bool
mep_interrupt_p (void)
{
  if (cfun->machine->interrupt_handler == 0)
    {
      int interrupt_handler
	= (lookup_attribute ("interrupt",
			     DECL_ATTRIBUTES (current_function_decl))
	   != NULL_TREE);
      cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
    }
  return cfun->machine->interrupt_handler == 2;
}
/* Return true if the current function has the "disinterrupt"
   attribute, cached the same way as mep_interrupt_p (0 = unknown,
   1 = no, 2 = yes).  */
static bool
mep_disinterrupt_p (void)
{
  if (cfun->machine->disable_interrupts == 0)
    {
      int disable_interrupts
	= (lookup_attribute ("disinterrupt",
			     DECL_ATTRIBUTES (current_function_decl))
	   != NULL_TREE);
      cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
    }
  return cfun->machine->disable_interrupts == 2;
}
/* Frame/Epilog/Prolog Related.  */

/* Return true if INSN sets REG.  */
static bool
mep_reg_set_p (rtx reg, rtx insn)
{
  /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
  if (INSN_P (insn))
    {
      if (FIND_REG_INC_NOTE (insn, reg))
	return true;
      insn = PATTERN (insn);
    }

  /* A no-op self-copy does not count as setting the register.  */
  if (GET_CODE (insn) == SET
      && GET_CODE (XEXP (insn, 0)) == REG
      && GET_CODE (XEXP (insn, 1)) == REG
      && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
    return false;

  return set_of (reg, insn) != NULL_RTX;
}
/* Cached answers for "does the prologue save register R?", kept in
   cfun->machine->reg_saved[].  */
#define MEP_SAVES_UNKNOWN 0
#define MEP_SAVES_YES 1
#define MEP_SAVES_MAYBE 2
#define MEP_SAVES_NO 3
/* Return true if hard register REGNO is set anywhere in the current
   function (scanning the topmost insn sequence), or must conservatively
   be treated as set (live registers in interrupt handlers, $lp when
   profiling).  */
static bool
mep_reg_set_in_function (int regno)
{
  rtx reg;
  rtx_insn *insn;

  if (mep_interrupt_p () && df_regs_ever_live_p(regno))
    return true;

  /* The profiling code clobbers $lp implicitly.  */
  if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
    return true;

  push_topmost_sequence ();
  insn = get_insns ();
  pop_topmost_sequence ();

  if (!insn)
    return false;

  reg = gen_rtx_REG (SImode, regno);

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && mep_reg_set_p (reg, insn))
      return true;
  return false;
}
/* Return true if the current function contains an asm statement with
   no operands (GET_CODE == ASM_INPUT); such asms might touch any
   register.  Cached in cfun->machine->asms_without_operands
   (0 = unknown, 1 = no, 2 = yes).  */
static bool
mep_asm_without_operands_p (void)
{
  if (cfun->machine->asms_without_operands == 0)
    {
      rtx_insn *insn;

      push_topmost_sequence ();
      insn = get_insns ();
      pop_topmost_sequence ();

      cfun->machine->asms_without_operands = 1;
      while (insn)
	{
	  if (INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == ASM_INPUT)
	    {
	      cfun->machine->asms_without_operands = 2;
	      break;
	    }
	  insn = NEXT_INSN (insn);
	}

    }
  return cfun->machine->asms_without_operands == 2;
}
/* Interrupt functions save/restore every call-preserved register, and
   any call-used register it uses (or all if it calls any function,
   since they may get clobbered there too).  Here we check to see
   which call-used registers need saving.  */

/* The IVC2 coprocessor condition/control registers that must also be
   preserved by interrupt handlers.  */
#define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
			    && (r == FIRST_CCR_REGNO + 1 \
				|| (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
				|| (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))

/* Return true if register R must be saved because the current function
   is an interrupt handler.  */
static bool
mep_interrupt_saved_reg (int r)
{
  if (!mep_interrupt_p ())
    return false;
  /* The prologue needs a control-register shuttle temporary.  */
  if (r == REGSAVE_CONTROL_TEMP
      || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
    return true;
  /* An operand-less asm may touch anything that is not fixed.  */
  if (mep_asm_without_operands_p ()
      && (!fixed_regs[r]
	  || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
	  || IVC2_ISAVED_REG (r)))
    return true;
  if (!crtl->is_leaf)
    /* Function calls mean we need to save $lp.  */
    if (r == LP_REGNO || IVC2_ISAVED_REG (r))
      return true;
  if (!crtl->is_leaf || cfun->machine->doloop_tags > 0)
    /* The interrupt handler might use these registers for repeat blocks,
       or it might call a function that does so.  */
    if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
      return true;
  /* Unused call-clobbered registers in a leaf handler are safe.  */
  if (crtl->is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
    return false;
  /* Functions we call might clobber these.  */
  if (call_used_regs[r] && !fixed_regs[r])
    return true;
  /* Additional registers that need to be saved for IVC2.  */
  if (IVC2_ISAVED_REG (r))
    return true;

  return false;
}
/* Return true if the prologue must save register R.  Until the frame
   layout is locked the answer is recomputed and cached in
   cfun->machine->reg_saved[] (see the MEP_SAVES_* values).  */
static bool
mep_call_saves_register (int r)
{
  if (! cfun->machine->frame_locked)
    {
      int rv = MEP_SAVES_NO;

      if (cfun->machine->reg_save_slot[r])
	rv = MEP_SAVES_YES;
      else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
	/* Profiling implicitly clobbers $lp; see mep_epilogue_uses.  */
	rv = MEP_SAVES_YES;
      else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
	rv = MEP_SAVES_YES;
      else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
	rv = MEP_SAVES_YES;
      else if (crtl->calls_eh_return && (r == 10 || r == 11))
	/* We need these to have stack slots so that they can be set during
	   unwinding. */
	rv = MEP_SAVES_YES;
      else if (mep_interrupt_saved_reg (r))
	rv = MEP_SAVES_YES;
      cfun->machine->reg_saved[r] = rv;
    }
  return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
}
2462 /* Return true if epilogue uses register REGNO. */
2464 bool
2465 mep_epilogue_uses (int regno)
2467 /* Since $lp is a call-saved register, the generic code will normally
2468 mark it used in the epilogue if it needs to be saved and restored.
2469 However, when profiling is enabled, the profiling code will implicitly
2470 clobber $11. This case has to be handled specially both here and in
2471 mep_call_saves_register. */
2472 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2473 return true;
2474 /* Interrupt functions save/restore pretty much everything. */
/* Only meaningful after reload, when the interrupt save set is final.  */
2475 return (reload_completed && mep_interrupt_saved_reg (regno));
2478 static int
2479 mep_reg_size (int regno)
2481 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2482 return 8;
2483 return 4;
2486 /* Worker function for TARGET_CAN_ELIMINATE. */
2488 bool
2489 mep_can_eliminate (const int from, const int to)
2491 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2492 ? ! frame_pointer_needed
2493 : true);
/* Return the offset between eliminable register pair FROM/TO.  As a
   side effect (while the frame is not yet locked) this computes the
   frame layout: regsave_filler and frame_filler pad the register-save
   area and the whole frame out to 8-byte multiples.  */
2497 mep_elimination_offset (int from, int to)
2499 int reg_save_size;
2500 int i;
2501 int frame_size = get_frame_size () + crtl->outgoing_args_size;
2502 int total_size;
/* Reset the save-register cache so the sizes below are recomputed.  */
2504 if (!cfun->machine->frame_locked)
2505 memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));
2507 /* We don't count arg_regs_to_save in the arg pointer offset, because
2508 gcc thinks the arg pointer has moved along with the saved regs.
2509 However, we do count it when we adjust $sp in the prologue. */
2510 reg_save_size = 0;
2511 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2512 if (mep_call_saves_register (i))
2513 reg_save_size += mep_reg_size (i);
2515 if (reg_save_size % 8)
2516 cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
2517 else
2518 cfun->machine->regsave_filler = 0;
2520 /* This is what our total stack adjustment looks like. */
2521 total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);
2523 if (total_size % 8)
2524 cfun->machine->frame_filler = 8 - (total_size % 8);
2525 else
2526 cfun->machine->frame_filler = 0;
2529 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
2530 return reg_save_size + cfun->machine->regsave_filler;
2532 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2533 return cfun->machine->frame_filler + frame_size;
2535 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2536 return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;
/* No other elimination pairs are defined for this target.  */
2538 gcc_unreachable ();
/* Mark insn X as frame-related and return it, so prologue emitters can
   wrap an emit call in F(...) inline.  */
2541 static rtx_insn *
2542 F (rtx_insn *x)
2544 RTX_FRAME_RELATED_P (x) = 1;
2545 return x;
2548 /* Since the prologue/epilogue code is generated after optimization,
2549 we can't rely on gcc to split constants for us. So, this code
2550 captures all the ways to add a constant to a register in one logic
2551 chunk, including optimizing away insns we just don't need. This
2552 makes the prolog/epilog code easier to follow. */
/* Emit DEST = SRC + VALUE.  If MARK_FRAME is nonzero the emitted insns
   are flagged frame-related; in the big-constant case a
   REG_FRAME_RELATED_EXPR note describes the net effect for the
   unwinder, since the temp-register sequence alone would confuse it.  */
2553 static void
2554 add_constant (int dest, int src, int value, int mark_frame)
2556 rtx_insn *insn;
2557 int hi, lo;
/* Identity: nothing to emit at all.  */
2559 if (src == dest && value == 0)
2560 return;
/* Plain register copy.  */
2562 if (value == 0)
2564 insn = emit_move_insn (gen_rtx_REG (SImode, dest),
2565 gen_rtx_REG (SImode, src));
2566 if (mark_frame)
2567 RTX_FRAME_RELATED_P(insn) = 1;
2568 return;
/* Fits the add insn's 16-bit signed immediate.  */
2571 if (value >= -32768 && value <= 32767)
2573 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2574 gen_rtx_REG (SImode, src),
2575 GEN_INT (value)));
2576 if (mark_frame)
2577 RTX_FRAME_RELATED_P(insn) = 1;
2578 return;
2581 /* Big constant, need to use a temp register. We use
2582 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2583 area is always small enough to directly add to). */
2585 hi = trunc_int_for_mode (value & 0xffff0000, SImode);
2586 lo = value & 0xffff;
2588 insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2589 GEN_INT (hi));
2591 if (lo)
2593 insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2594 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2595 GEN_INT (lo)));
2598 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2599 gen_rtx_REG (SImode, src),
2600 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
2601 if (mark_frame)
2603 RTX_FRAME_RELATED_P(insn) = 1;
/* Describe the overall DEST = DEST + VALUE effect for DWARF unwind.  */
2604 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2605 gen_rtx_SET (SImode,
2606 gen_rtx_REG (SImode, dest),
2607 gen_rtx_PLUS (SImode,
2608 gen_rtx_REG (SImode, dest),
2609 GEN_INT (value))));
2613 /* Move SRC to DEST. Mark the move as being potentially dead if
2614 MAYBE_DEAD_P. */
/* The REG_MAYBE_DEAD note emission is currently disabled (#if 0); the
   parameter is retained so callers still document their intent.  */
2616 static rtx_insn *
2617 maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2619 rtx_insn *insn = emit_move_insn (dest, src);
2620 #if 0
2621 if (maybe_dead_p)
2622 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2623 #endif
2624 return insn;
2627 /* Used for interrupt functions, which can't assume that $tp and $gp
2628 contain the correct pointers. */
2630 static void
2631 mep_reload_pointer (int regno, const char *symbol)
2633 rtx reg, sym;
2635 if (!df_regs_ever_live_p(regno) && crtl->is_leaf)
2636 return;
2638 reg = gen_rtx_REG (SImode, regno);
2639 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2640 emit_insn (gen_movsi_topsym_s (reg, sym));
2641 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2644 /* Assign save slots for any register not already saved. DImode
2645 registers go at the end of the reg save area; the rest go at the
2646 beginning. This is for alignment purposes. Returns true if a frame
2647 is really needed. */
2648 static bool
2649 mep_assign_save_slots (int reg_save_size)
2651 bool really_need_stack_frame = false;
2652 int di_ofs = 0;
2653 int i;
2655 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2656 if (mep_call_saves_register(i))
2658 int regsize = mep_reg_size (i);
/* $tp/$gp/$lp only force a real frame if actually written here.  */
2660 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2661 || mep_reg_set_in_function (i))
2662 really_need_stack_frame = true;
2664 if (cfun->machine->reg_save_slot[i])
2665 continue;
/* 4-byte registers fill the area from the bottom up ...  */
2667 if (regsize < 8)
2669 cfun->machine->reg_save_size += regsize;
2670 cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
2672 else
/* ... and 8-byte registers from the top down, for alignment.  */
2674 cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
2675 di_ofs += 8;
/* Freeze the layout: mep_call_saves_register now uses its cache.  */
2678 cfun->machine->frame_locked = 1;
2679 return really_need_stack_frame;
/* Expand the function prologue: adjust $sp, store every register that
   mep_call_saves_register approves into its assigned slot, set up the
   frame pointer if needed, and for interrupt handlers reload $gp/$tp.
   Non-loadable control/coprocessor registers are staged through
   REGSAVE_CONTROL_TEMP (and +1 for the high half of DImode CRs).  */
2682 void
2683 mep_expand_prologue (void)
2685 int i, rss, sp_offset = 0;
2686 int reg_save_size;
2687 int frame_size;
2688 int really_need_stack_frame;
2690 /* We must not allow register renaming in interrupt functions,
2691 because that invalidates the correctness of the set of call-used
2692 registers we're going to save/restore. */
2693 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2695 if (mep_disinterrupt_p ())
2696 emit_insn (gen_mep_disable_int ());
2698 cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;
2700 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2701 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2702 really_need_stack_frame = frame_size;
2704 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
/* Fold the frame adjustment into the first $sp add when the combined
   offset still fits a short immediate.  */
2706 sp_offset = reg_save_size;
2707 if (sp_offset + frame_size < 128)
2708 sp_offset += frame_size ;
2710 add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);
2712 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2713 if (mep_call_saves_register(i))
2715 rtx mem;
2716 bool maybe_dead_p;
2717 enum machine_mode rmode;
2719 rss = cfun->machine->reg_save_slot[i];
2721 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2722 && (!mep_reg_set_in_function (i)
2723 && !mep_interrupt_p ()))
2724 continue;
2726 if (mep_reg_size (i) == 8)
2727 rmode = DImode;
2728 else
2729 rmode = SImode;
2731 /* If there is a pseudo associated with this register's initial value,
2732 reload might have already spilt it to the stack slot suggested by
2733 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2734 deleted as dead. */
2735 mem = gen_rtx_MEM (rmode,
2736 plus_constant (Pmode, stack_pointer_rtx,
2737 sp_offset - rss));
2738 maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));
/* Directly storable registers go straight to memory ...  */
2740 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2741 F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
/* ... 64-bit CRs are split into two word stores via two temps ...  */
2742 else if (rmode == DImode)
2744 rtx_insn *insn;
2745 int be = TARGET_BIG_ENDIAN ? 4 : 0;
2747 mem = gen_rtx_MEM (SImode,
2748 plus_constant (Pmode, stack_pointer_rtx,
2749 sp_offset - rss + be));
2751 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2752 gen_rtx_REG (SImode, i),
2753 maybe_dead_p);
2754 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2755 gen_rtx_ZERO_EXTRACT (SImode,
2756 gen_rtx_REG (DImode, i),
2757 GEN_INT (32),
2758 GEN_INT (32)),
2759 maybe_dead_p);
2760 insn = maybe_dead_move (mem,
2761 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2762 maybe_dead_p);
2763 RTX_FRAME_RELATED_P (insn) = 1;
2765 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2766 gen_rtx_SET (VOIDmode,
2767 copy_rtx (mem),
2768 gen_rtx_REG (rmode, i)));
2769 mem = gen_rtx_MEM (SImode,
2770 plus_constant (Pmode, stack_pointer_rtx,
2771 sp_offset - rss + (4-be)));
2772 insn = maybe_dead_move (mem,
2773 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2774 maybe_dead_p);
/* ... everything else is staged through one temp register.  */
2776 else
2778 rtx_insn *insn;
2779 maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2780 gen_rtx_REG (rmode, i),
2781 maybe_dead_p);
2782 insn = maybe_dead_move (mem,
2783 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2784 maybe_dead_p);
2785 RTX_FRAME_RELATED_P (insn) = 1;
2787 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2788 gen_rtx_SET (VOIDmode,
2789 copy_rtx (mem),
2790 gen_rtx_REG (rmode, i)));
2794 if (frame_pointer_needed)
2796 /* We've already adjusted down by sp_offset. Total $sp change
2797 is reg_save_size + frame_size. We want a net change here of
2798 just reg_save_size. */
2799 add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
2802 add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);
2804 if (mep_interrupt_p ())
2806 mep_reload_pointer(GP_REGNO, "__sdabase");
2807 mep_reload_pointer(TP_REGNO, "__tpbase");
/* Emit a human-readable summary of the frame layout as assembler
   comments at the start of the function (full layout only when debug
   info is enabled), and pick the printable name for the frame-pointer
   register ($fp vs $8).  */
2811 static void
2812 mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2814 int local = hwi_local;
2815 int frame_size = local + crtl->outgoing_args_size;
2816 int reg_save_size;
2817 int ffill;
2818 int i, sp, skip;
2819 int sp_offset;
2820 int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
2822 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2823 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2824 sp_offset = reg_save_size + frame_size;
2826 ffill = cfun->machine->frame_filler;
2828 if (cfun->machine->mep_frame_pointer_needed)
2829 reg_names[FP_REGNO] = "$fp";
2830 else
2831 reg_names[FP_REGNO] = "$8";
2833 if (sp_offset == 0)
2834 return;
/* Without debug info, print only a one-line frame summary.  */
2836 if (debug_info_level == DINFO_LEVEL_NONE)
2838 fprintf (file, "\t# frame: %d", sp_offset);
2839 if (reg_save_size)
2840 fprintf (file, " %d regs", reg_save_size);
2841 if (local)
2842 fprintf (file, " %d locals", local);
2843 if (crtl->outgoing_args_size)
2844 fprintf (file, " %d args", crtl->outgoing_args_size);
2845 fprintf (file, "\n");
2846 return;
2849 fprintf (file, "\t#\n");
2850 fprintf (file, "\t# Initial Frame Information:\n");
2851 if (sp_offset || !frame_pointer_needed)
2852 fprintf (file, "\t# Entry ---------- 0\n");
2854 /* Sort registers by save slots, so they're printed in the order
2855 they appear in memory, not the order they're saved in. */
2856 for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
2857 slot_map[si] = si;
/* Simple selection-style sort; FIRST_PSEUDO_REGISTER is small enough
   that O(n^2) is irrelevant here.  */
2858 for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
2859 for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
2860 if (cfun->machine->reg_save_slot[slot_map[si]]
2861 > cfun->machine->reg_save_slot[slot_map[sj]])
2863 int t = slot_map[si];
2864 slot_map[si] = slot_map[sj];
2865 slot_map[sj] = t;
2868 sp = 0;
2869 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2871 int rsize;
2872 int r = slot_map[i];
2873 int rss = cfun->machine->reg_save_slot[r];
2875 if (!mep_call_saves_register (r))
2876 continue;
2878 if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
2879 && (!mep_reg_set_in_function (r)
2880 && !mep_interrupt_p ()))
2881 continue;
2883 rsize = mep_reg_size(r);
2884 skip = rss - (sp+rsize);
2885 if (skip)
2886 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2887 fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2888 rsize, reg_names[r], sp_offset - rss);
2889 sp = rss;
2892 skip = reg_save_size - sp;
2893 if (skip)
2894 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2896 if (frame_pointer_needed)
2897 fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
2898 if (local)
2899 fprintf (file, "\t# %3d bytes for local vars\n", local);
2900 if (ffill)
2901 fprintf (file, "\t# %3d bytes for alignment\n", ffill);
2902 if (crtl->outgoing_args_size)
2903 fprintf (file, "\t# %3d bytes for outgoing args\n",
2904 crtl->outgoing_args_size);
2905 fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
2906 fprintf (file, "\t#\n");
/* Nonzero while expanding an EH-return epilogue: suppresses restoring
   $lp so the handler address installed there survives.  */
2910 static int mep_prevent_lp_restore = 0;
/* Nonzero while expanding a sibcall epilogue: stop before emitting the
   final return, since the sibling call provides the control transfer.  */
2911 static int mep_sibcall_epilogue = 0;
/* Expand the function epilogue: rebase $sp, restore saved registers
   (in reverse order, so control/coprocessor registers are reloaded
   before the temps used to restore them are themselves restored),
   deallocate the frame, and emit the appropriate return.  Behavior is
   modulated by mep_prevent_lp_restore and mep_sibcall_epilogue.  */
2913 void
2914 mep_expand_epilogue (void)
2916 int i, sp_offset = 0;
2917 int reg_save_size = 0;
2918 int frame_size;
2919 int lp_temp = LP_REGNO, lp_slot = -1;
2920 int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
2921 int interrupt_handler = mep_interrupt_p ();
2923 if (profile_arc_flag == 2)
2924 emit_insn (gen_mep_bb_trace_ret ());
2926 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2927 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2929 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
2931 if (frame_pointer_needed)
2933 /* If we have a frame pointer, we won't have a reliable stack
2934 pointer (alloca, you know), so rebase SP from FP */
2935 emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
2936 gen_rtx_REG (SImode, FP_REGNO));
2937 sp_offset = reg_save_size;
2939 else
2941 /* SP is right under our local variable space. Adjust it if
2942 needed. */
2943 sp_offset = reg_save_size + frame_size;
2944 if (sp_offset >= 128)
2946 add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
2947 sp_offset -= frame_size;
2951 /* This is backwards so that we restore the control and coprocessor
2952 registers before the temporary registers we use to restore
2953 them. */
2954 for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
2955 if (mep_call_saves_register (i))
2957 enum machine_mode rmode;
2958 int rss = cfun->machine->reg_save_slot[i];
2960 if (mep_reg_size (i) == 8)
2961 rmode = DImode;
2962 else
2963 rmode = SImode;
2965 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2966 && !(mep_reg_set_in_function (i) || interrupt_handler))
2967 continue;
2968 if (mep_prevent_lp_restore && i == LP_REGNO)
2969 continue;
/* $10/$11 carry the EH return values; leave them alone in a normal
   function epilogue.  */
2970 if (!mep_prevent_lp_restore
2971 && !interrupt_handler
2972 && (i == 10 || i == 11))
2973 continue;
2975 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2976 emit_move_insn (gen_rtx_REG (rmode, i),
2977 gen_rtx_MEM (rmode,
2978 plus_constant (Pmode, stack_pointer_rtx,
2979 sp_offset - rss)));
2980 else
2982 if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
2983 /* Defer this one so we can jump indirect rather than
2984 copying the RA to $lp and "ret". EH epilogues
2985 automatically skip this anyway. */
2986 lp_slot = sp_offset-rss;
2987 else
2989 emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2990 gen_rtx_MEM (rmode,
2991 plus_constant (Pmode,
2992 stack_pointer_rtx,
2993 sp_offset-rss)));
2994 emit_move_insn (gen_rtx_REG (rmode, i),
2995 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
2999 if (lp_slot != -1)
3001 /* Restore this one last so we know it will be in the temp
3002 register when we return by jumping indirectly via the temp. */
3003 emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
3004 gen_rtx_MEM (SImode,
3005 plus_constant (Pmode, stack_pointer_rtx,
3006 lp_slot)));
3007 lp_temp = REGSAVE_CONTROL_TEMP;
3011 add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);
/* EH return also pops the exception handler's stack adjustment.  */
3013 if (crtl->calls_eh_return && mep_prevent_lp_restore)
3014 emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
3015 gen_rtx_REG (SImode, SP_REGNO),
3016 cfun->machine->eh_stack_adjust));
3018 if (mep_sibcall_epilogue)
3019 return;
3021 if (mep_disinterrupt_p ())
3022 emit_insn (gen_mep_enable_int ());
3024 if (mep_prevent_lp_restore)
3026 emit_jump_insn (gen_eh_return_internal ());
3027 emit_barrier ();
3029 else if (interrupt_handler)
3030 emit_jump_insn (gen_mep_reti ());
3031 else
3032 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
/* Expand the eh_return pattern: force the handler address into $lp if
   it is not there already, then emit the EH epilogue.  */
3035 void
3036 mep_expand_eh_return (rtx *operands)
3038 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3040 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3041 emit_move_insn (ra, operands[0]);
3042 operands[0] = ra;
3045 emit_insn (gen_eh_epilogue (operands[0]));
/* Expand an EH epilogue: record that $0 holds the extra stack
   adjustment, then emit a normal epilogue with the $lp restore
   suppressed so the handler address survives in $lp.  */
3048 void
3049 mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
3051 cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
3052 mep_prevent_lp_restore = 1;
3053 mep_expand_epilogue ();
3054 mep_prevent_lp_restore = 0;
/* Expand the epilogue for a sibling call: restore registers and pop the
   frame, but emit no return -- the sibcall itself transfers control.  */
3057 void
3058 mep_expand_sibcall_epilogue (void)
3060 mep_sibcall_epilogue = 1;
3061 mep_expand_epilogue ();
3062 mep_sibcall_epilogue = 0;
3065 static bool
3066 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3068 if (decl == NULL)
3069 return false;
3071 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3072 return false;
3074 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3075 if (mep_interrupt_p () || mep_disinterrupt_p ())
3076 return false;
3078 return true;
/* Register holding the EH stack adjustment on return: $10 (cf. the
   special r10/r11 handling in mep_expand_epilogue).  */
3082 mep_return_stackadj_rtx (void)
3084 return gen_rtx_REG (SImode, 10);
/* Register receiving the EH handler address: the link pointer $lp.  */
3088 mep_return_handler_rtx (void)
3090 return gen_rtx_REG (SImode, LP_REGNO);
/* Emit the profiling (mcount) call sequence to FILE.  Always right at
   the beginning of the function: $0 and $lp are preserved on the stack
   around a bsr to __mep_mcount.  None of the lines take format
   arguments, so plain fputs suffices.  */
void
mep_function_profiler (FILE *file)
{
  static const char *const mcount_seq[] = {
    "\t# mep function profiler\n",
    "\tadd\t$sp, -8\n",
    "\tsw\t$0, ($sp)\n",
    "\tldc\t$0, $lp\n",
    "\tsw\t$0, 4($sp)\n",
    "\tbsr\t__mep_mcount\n",
    "\tlw\t$0, 4($sp)\n",
    "\tstc\t$0, $lp\n",
    "\tlw\t$0, ($sp)\n",
    "\tadd\t$sp, 8\n\n",
  };
  size_t k;

  for (k = 0; k < sizeof (mcount_seq) / sizeof (mcount_seq[0]); k++)
    fputs (mcount_seq[k], file);
}
3109 const char *
3110 mep_emit_bb_trace_ret (void)
3112 fprintf (asm_out_file, "\t# end of block profiling\n");
3113 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3114 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3115 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3116 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3117 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3118 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3119 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3120 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3121 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3122 return "";
3125 #undef SAVE
3126 #undef RESTORE
3128 /* Operand Printing. */
3130 void
3131 mep_print_operand_address (FILE *stream, rtx address)
3133 if (GET_CODE (address) == MEM)
3134 address = XEXP (address, 0);
3135 else
3136 /* cf: gcc.dg/asm-4.c. */
3137 gcc_assert (GET_CODE (address) == REG);
3139 mep_print_operand (stream, address, 0);
/* Conversion table driving mep_print_operand: CODE is the operand
   modifier letter (0 for none), PATTERN is matched against the string
   built by encode_pattern () for the operand rtx, and FORMAT describes
   the output -- a digit indexes into patternr[], '\\' escapes the next
   character, anything else is printed literally.  */
3142 static struct
3144 char code;
3145 const char *pattern;
3146 const char *format;
3148 const conversions[] =
3150 { 0, "r", "0" },
3151 { 0, "m+ri", "3(2)" },
3152 { 0, "mr", "(1)" },
3153 { 0, "ms", "(1)" },
3154 { 0, "ml", "(1)" },
3155 { 0, "mLrs", "%lo(3)(2)" },
3156 { 0, "mLr+si", "%lo(4+5)(2)" },
3157 { 0, "m+ru2s", "%tpoff(5)(2)" },
3158 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3159 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3160 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3161 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3162 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3163 { 0, "mi", "(1)" },
3164 { 0, "m+si", "(2+3)" },
3165 { 0, "m+li", "(2+3)" },
3166 { 0, "i", "0" },
3167 { 0, "s", "0" },
3168 { 0, "+si", "1+2" },
3169 { 0, "+u2si", "%tpoff(3+4)" },
3170 { 0, "+u3si", "%sdaoff(3+4)" },
3171 { 0, "l", "0" },
3172 { 'b', "i", "0" },
3173 { 'B', "i", "0" },
3174 { 'U', "i", "0" },
3175 { 'h', "i", "0" },
3176 { 'h', "Hs", "%hi(1)" },
3177 { 'I', "i", "0" },
3178 { 'I', "u2s", "%tpoff(2)" },
3179 { 'I', "u3s", "%sdaoff(2)" },
3180 { 'I', "+u2si", "%tpoff(3+4)" },
3181 { 'I', "+u3si", "%sdaoff(3+4)" },
3182 { 'J', "i", "0" },
3183 { 'P', "mr", "(1\\+),\\0" },
3184 { 'x', "i", "0" },
3185 { 0, 0, 0 }
3188 static int
3189 unique_bit_in (HOST_WIDE_INT i)
3191 switch (i & 0xff)
3193 case 0x01: case 0xfe: return 0;
3194 case 0x02: case 0xfd: return 1;
3195 case 0x04: case 0xfb: return 2;
3196 case 0x08: case 0xf7: return 3;
3197 case 0x10: case 0x7f: return 4;
3198 case 0x20: case 0xbf: return 5;
3199 case 0x40: case 0xdf: return 6;
3200 case 0x80: case 0xef: return 7;
3201 default:
3202 gcc_unreachable ();
3206 static int
3207 bit_size_for_clip (HOST_WIDE_INT i)
3209 int rv;
3211 for (rv = 0; rv < 31; rv ++)
3212 if (((HOST_WIDE_INT) 1 << rv) > i)
3213 return rv + 1;
3214 gcc_unreachable ();
3217 /* Print an operand to an assembler instruction.  X is the operand rtx,
   CODE the (possibly zero) modifier letter.  '<' and 'M' are handled
   specially for CR <- CR moves; everything else goes through the
   conversions[] pattern table.  */
3219 void
3220 mep_print_operand (FILE *file, rtx x, int code)
3222 int i, j;
3223 const char *real_name;
3225 if (code == '<')
3227 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3228 we're using, then skip over the "mep_" part of its name. */
3229 const struct cgen_insn *insn;
3231 if (mep_get_move_insn (mep_cmov, &insn))
3232 fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
3233 else
3234 mep_intrinsic_unavailable (mep_cmov);
3235 return;
3237 if (code == 'L')
/* 'L': print the bit-op suffix implied by the logical operator.  */
3239 switch (GET_CODE (x))
3241 case AND:
3242 fputs ("clr", file);
3243 return;
3244 case IOR:
3245 fputs ("set", file);
3246 return;
3247 case XOR:
3248 fputs ("not", file);
3249 return;
3250 default:
3251 output_operand_lossage ("invalid %%L code");
3254 if (code == 'M')
3256 /* Print the second operand of a CR <- CR move. If we're using
3257 a two-operand instruction (i.e., a real cmov), then just print
3258 the operand normally. If we're using a "reg, reg, immediate"
3259 instruction such as caddi3, print the operand followed by a
3260 zero field. If we're using a three-register instruction,
3261 print the operand twice. */
3262 const struct cgen_insn *insn;
3264 mep_print_operand (file, x, 0);
3265 if (mep_get_move_insn (mep_cmov, &insn)
3266 && insn_data[insn->icode].n_operands == 3)
3268 fputs (", ", file);
3269 if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
3270 mep_print_operand (file, x, 0);
3271 else
3272 mep_print_operand (file, const0_rtx, 0);
3274 return;
/* Generic path: classify X with encode_pattern (), then emit the format
   of the first conversions[] row whose code and pattern both match.  */
3277 encode_pattern (x);
3278 for (i = 0; conversions[i].pattern; i++)
3279 if (conversions[i].code == code
3280 && strcmp(conversions[i].pattern, pattern) == 0)
3282 for (j = 0; conversions[i].format[j]; j++)
/* '\\' escapes the next format character.  */
3283 if (conversions[i].format[j] == '\\')
3285 fputc (conversions[i].format[j+1], file);
3286 j++;
/* A digit selects a sub-rtx captured by encode_pattern ().  */
3288 else if (ISDIGIT(conversions[i].format[j]))
3290 rtx r = patternr[conversions[i].format[j] - '0'];
3291 switch (GET_CODE (r))
3293 case REG:
3294 fprintf (file, "%s", reg_names [REGNO (r)]);
3295 break;
3296 case CONST_INT:
3297 switch (code)
3299 case 'b':
3300 fprintf (file, "%d", unique_bit_in (INTVAL (r)));
3301 break;
3302 case 'B':
3303 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
3304 break;
3305 case 'h':
3306 fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
3307 break;
3308 case 'U':
3309 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
3310 break;
3311 case 'J':
3312 fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
3313 break;
3314 case 'x':
3315 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3316 && !(INTVAL (r) & 0xff))
3317 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
3318 else
3319 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3320 break;
3321 case 'I':
3322 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3323 && conversions[i].format[j+1] == 0)
3325 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
3326 fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
3328 else
3329 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3330 break;
3331 default:
3332 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3333 break;
3335 break;
3336 case CONST_DOUBLE:
3337 fprintf(file, "[const_double 0x%lx]",
3338 (unsigned long) CONST_DOUBLE_HIGH(r));
3339 break;
3340 case SYMBOL_REF:
3341 real_name = targetm.strip_name_encoding (XSTR (r, 0));
3342 assemble_name (file, real_name);
3343 break;
3344 case LABEL_REF:
3345 output_asm_label (r);
3346 break;
3347 default:
3348 fprintf (stderr, "don't know how to print this operand:");
3349 debug_rtx (r);
3350 gcc_unreachable ();
3353 else
/* Literal character; suppress a '+' before a negative constant so we
   don't print "+-5".  */
3355 if (conversions[i].format[j] == '+'
3356 && (!code || code == 'I')
3357 && ISDIGIT (conversions[i].format[j+1])
3358 && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
3359 && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
3360 continue;
3361 fputc(conversions[i].format[j], file);
3363 break;
3365 if (!conversions[i].pattern)
3367 error ("unconvertible operand %c %qs", code?code:'-', pattern);
3368 debug_rtx(x);
3371 return;
3374 void
3375 mep_final_prescan_insn (rtx_insn *insn, rtx *operands ATTRIBUTE_UNUSED,
3376 int noperands ATTRIBUTE_UNUSED)
3378 /* Despite the fact that MeP is perfectly capable of branching and
3379 doing something else in the same bundle, gcc does jump
3380 optimization *after* scheduling, so we cannot trust the bundling
3381 flags on jump instructions. */
/* NOTE(review): BImode on the insn appears to mark VLIW-bundled insns,
   and the leading '+' tells the assembler the insn is not a core-slot
   op -- confirm against the MeP assembler syntax docs.  */
3382 if (GET_MODE (insn) == BImode
3383 && get_attr_slots (insn) != SLOTS_CORE)
3384 fputc ('+', asm_out_file);
3387 /* Function args in registers. */
/* TARGET_SETUP_INCOMING_VARARGS: record how many of the four argument
   registers remain to be dumped to the stack for a varargs function,
   and report the pretend-args size back to the middle end.  */
3389 static void
3390 mep_setup_incoming_varargs (cumulative_args_t cum,
3391 enum machine_mode mode ATTRIBUTE_UNUSED,
3392 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3393 int second_time ATTRIBUTE_UNUSED)
3395 int nsave = 4 - (get_cumulative_args (cum)->nregs + 1);
3397 if (nsave > 0)
3398 cfun->machine->arg_regs_to_save = nsave;
/* NOTE(review): *pretend_size is written even when nsave <= 0, yielding
   a non-positive size -- verify callers treat that as zero.  */
3399 *pretend_size = nsave * 4;
3402 static int
3403 bytesize (const_tree type, enum machine_mode mode)
3405 if (mode == BLKmode)
3406 return int_size_in_bytes (type);
3407 return GET_MODE_SIZE (mode);
/* Expand __builtin_saveregs: dump the unnamed argument registers into a
   stack block and return its address.  On IVC2 the block additionally
   holds the matching 64-bit coprocessor argument registers after the
   (8-byte aligned) GPR part.  */
3410 static rtx
3411 mep_expand_builtin_saveregs (void)
3413 int bufsize, i, ns;
3414 rtx regbuf;
3416 ns = cfun->machine->arg_regs_to_save;
3417 if (TARGET_IVC2)
3419 bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
3420 regbuf = assign_stack_local (SImode, bufsize, 64);
3422 else
3424 bufsize = ns * 4;
3425 regbuf = assign_stack_local (SImode, bufsize, 32);
/* GPR args occupy $1..$4; the last NS of them are unnamed.  */
3428 move_block_from_reg (5-ns, regbuf, ns);
3430 if (TARGET_IVC2)
3432 rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
3433 int ofs = 8 * ((ns+1)/2);
3435 for (i=0; i<ns; i++)
/* NOTE(review): 49 looks like the hard-regno base of the IVC2
   coprocessor argument registers -- confirm against the regno layout.  */
3437 int rn = (4-ns) + i + 49;
3438 rtx ptr;
3440 ptr = offset_address (tmp, GEN_INT (ofs), 2);
3441 emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
3442 ofs += 8;
3445 return XEXP (regbuf, 0);
/* TARGET_BUILD_BUILTIN_VA_LIST: va_list is a record of four pointers --
   __va_next_gp / __va_next_gp_limit bound the saved GPR area,
   __va_next_cop points at the saved coprocessor registers, and
   __va_next_stack at the stack overflow area.  */
3448 static tree
3449 mep_build_builtin_va_list (void)
3451 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3452 tree record;
3455 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3457 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3458 get_identifier ("__va_next_gp"), ptr_type_node);
3459 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3460 get_identifier ("__va_next_gp_limit"),
3461 ptr_type_node);
3462 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3463 ptr_type_node);
3464 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3465 ptr_type_node);
3467 DECL_FIELD_CONTEXT (f_next_gp) = record;
3468 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3469 DECL_FIELD_CONTEXT (f_next_cop) = record;
3470 DECL_FIELD_CONTEXT (f_next_stack) = record;
/* Chain the fields in declaration order and lay the record out.  */
3472 TYPE_FIELDS (record) = f_next_gp;
3473 DECL_CHAIN (f_next_gp) = f_next_gp_limit;
3474 DECL_CHAIN (f_next_gp_limit) = f_next_cop;
3475 DECL_CHAIN (f_next_cop) = f_next_stack;
3477 layout_type (record);
3479 return record;
/* TARGET_EXPAND_BUILTIN_VA_START: initialize the four va_list fields --
   next_gp from the saveregs block, next_gp_limit just past the NS saved
   GPR words, next_cop past the 8-byte-rounded GPR area, and next_stack
   from NEXTARG.  */
3482 static void
3483 mep_expand_va_start (tree valist, rtx nextarg)
3485 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3486 tree next_gp, next_gp_limit, next_cop, next_stack;
3487 tree t, u;
3488 int ns;
3490 ns = cfun->machine->arg_regs_to_save;
3492 f_next_gp = TYPE_FIELDS (va_list_type_node);
3493 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3494 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3495 f_next_stack = DECL_CHAIN (f_next_cop);
3497 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3498 NULL_TREE);
3499 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3500 valist, f_next_gp_limit, NULL_TREE);
3501 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3502 NULL_TREE);
3503 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3504 valist, f_next_stack, NULL_TREE);
3506 /* va_list.next_gp = expand_builtin_saveregs (); */
3507 u = make_tree (sizetype, expand_builtin_saveregs ());
3508 u = fold_convert (ptr_type_node, u);
3509 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3510 TREE_SIDE_EFFECTS (t) = 1;
3511 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3513 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3514 u = fold_build_pointer_plus_hwi (u, 4 * ns);
3515 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3516 TREE_SIDE_EFFECTS (t) = 1;
3517 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3519 u = fold_build_pointer_plus_hwi (u, 8 * ((ns+1)/2));
3520 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3521 t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3522 TREE_SIDE_EFFECTS (t) = 1;
3523 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3525 /* va_list.next_stack = nextarg; */
3526 u = make_tree (ptr_type_node, nextarg);
3527 t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3528 TREE_SIDE_EFFECTS (t) = 1;
3529 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* TARGET_GIMPLIFY_VA_ARG_EXPR: emit GIMPLE that fetches the next
   va_arg of TYPE.  Values larger than one slot (or of unknown size)
   are passed by reference.  The pseudo-code below describes the
   emitted control flow.  */
3532 static tree
3533 mep_gimplify_va_arg_expr (tree valist, tree type,
3534 gimple_seq *pre_p,
3535 gimple_seq *post_p ATTRIBUTE_UNUSED)
3537 HOST_WIDE_INT size, rsize;
3538 bool by_reference, ivc2_vec;
3539 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3540 tree next_gp, next_gp_limit, next_cop, next_stack;
3541 tree label_sover, label_selse;
3542 tree tmp, res_addr;
3544 ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);
3546 size = int_size_in_bytes (type);
/* Vectors fit an 8-byte coprocessor slot on IVC2; everything else a
   4-byte GPR slot.  Bigger (or unknown-size) values go by reference.  */
3547 by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);
3549 if (by_reference)
3551 type = build_pointer_type (type);
3552 size = 4;
/* Round the stack increment up to a whole word.  */
3554 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
3556 f_next_gp = TYPE_FIELDS (va_list_type_node);
3557 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3558 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3559 f_next_stack = DECL_CHAIN (f_next_cop);
3561 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3562 NULL_TREE);
3563 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3564 valist, f_next_gp_limit, NULL_TREE);
3565 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3566 NULL_TREE);
3567 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3568 valist, f_next_stack, NULL_TREE);
3570 /* if f_next_gp < f_next_gp_limit
3571 IF (VECTOR_P && IVC2)
3572 val = *f_next_cop;
3573 ELSE
3574 val = *f_next_gp;
3575 f_next_gp += 4;
3576 f_next_cop += 8;
3577 else
3578 label_selse:
3579 val = *f_next_stack;
3580 f_next_stack += rsize;
3581 label_sover:
3584 label_sover = create_artificial_label (UNKNOWN_LOCATION);
3585 label_selse = create_artificial_label (UNKNOWN_LOCATION);
3586 res_addr = create_tmp_var (ptr_type_node, NULL);
/* Branch to the stack path when the register area is exhausted.  */
3588 tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
3589 unshare_expr (next_gp_limit));
3590 tmp = build3 (COND_EXPR, void_type_node, tmp,
3591 build1 (GOTO_EXPR, void_type_node,
3592 unshare_expr (label_selse)),
3593 NULL_TREE);
3594 gimplify_and_add (tmp, pre_p);
3596 if (ivc2_vec)
3598 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
3599 gimplify_and_add (tmp, pre_p);
3601 else
3603 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
3604 gimplify_and_add (tmp, pre_p);
/* Both cursors advance together so GPR and COP slots stay paired.  */
3607 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_gp), 4);
3608 gimplify_assign (unshare_expr (next_gp), tmp, pre_p);
3610 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_cop), 8);
3611 gimplify_assign (unshare_expr (next_cop), tmp, pre_p);
3613 tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
3614 gimplify_and_add (tmp, pre_p);
3616 /* - - */
3618 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
3619 gimplify_and_add (tmp, pre_p);
3621 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
3622 gimplify_and_add (tmp, pre_p);
3624 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_stack), rsize);
3625 gimplify_assign (unshare_expr (next_stack), tmp, pre_p);
3627 /* - - */
3629 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
3630 gimplify_and_add (tmp, pre_p);
3632 res_addr = fold_convert (build_pointer_type (type), res_addr);
/* By-reference values need one extra dereference.  */
3634 if (by_reference)
3635 res_addr = build_va_arg_indirect_ref (res_addr);
3637 return build_va_arg_indirect_ref (res_addr);
3640 void
3641 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3642 rtx libname ATTRIBUTE_UNUSED,
3643 tree fndecl ATTRIBUTE_UNUSED)
3645 pcum->nregs = 0;
3647 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3648 pcum->vliw = 1;
3649 else
3650 pcum->vliw = 0;
3653 /* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3654 larger than 4 bytes are passed indirectly. Return value in 0,
3655 unless bigger than 4 bytes, then the caller passes a pointer as the
3656 first arg. For varargs, we copy $1..$4 to the stack. */
3658 static rtx
3659 mep_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
3660 const_tree type ATTRIBUTE_UNUSED,
3661 bool named ATTRIBUTE_UNUSED)
3663 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3665 /* VOIDmode is a signal for the backend to pass data to the call
3666 expander via the second operand to the call pattern. We use
3667 this to determine whether to use "jsr" or "jsrv". */
3668 if (mode == VOIDmode)
3669 return GEN_INT (cum->vliw);
3671 /* If we havn't run out of argument registers, return the next. */
3672 if (cum->nregs < 4)
3674 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3675 return gen_rtx_REG (mode, cum->nregs + 49);
3676 else
3677 return gen_rtx_REG (mode, cum->nregs + 1);
3680 /* Otherwise the argument goes on the stack. */
3681 return NULL_RTX;
3684 static bool
3685 mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
3686 enum machine_mode mode,
3687 const_tree type,
3688 bool named ATTRIBUTE_UNUSED)
3690 int size = bytesize (type, mode);
3692 /* This is non-obvious, but yes, large values passed after we've run
3693 out of registers are *still* passed by reference - we put the
3694 address of the parameter on the stack, as well as putting the
3695 parameter itself elsewhere on the stack. */
3697 if (size <= 0 || size > 8)
3698 return true;
3699 if (size <= 4)
3700 return false;
3701 if (TARGET_IVC2 && get_cumulative_args (cum)->nregs < 4
3702 && type != NULL_TREE && VECTOR_TYPE_P (type))
3703 return false;
3704 return true;
3707 static void
3708 mep_function_arg_advance (cumulative_args_t pcum,
3709 enum machine_mode mode ATTRIBUTE_UNUSED,
3710 const_tree type ATTRIBUTE_UNUSED,
3711 bool named ATTRIBUTE_UNUSED)
3713 get_cumulative_args (pcum)->nregs += 1;
3716 bool
3717 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3719 int size = bytesize (type, BLKmode);
3720 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3721 return size > 0 && size <= 8 ? 0 : 1;
3722 return size > 0 && size <= 4 ? 0 : 1;
/* Implement TARGET_NARROW_VOLATILE_BITFIELD: MeP always narrows
   volatile bit-field accesses to the declared field width.

   The previous body contained an unreachable "return false;" after
   the unconditional "return true;"; the dead statement has been
   removed.  */

static bool
mep_narrow_volatile_bitfield (void)
{
  return true;
}
3732 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3735 mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
3737 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3738 return gen_rtx_REG (TYPE_MODE (type), 48);
3739 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3742 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3745 mep_libcall_value (enum machine_mode mode)
3747 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
/* Handle pipeline hazards.  */

/* The opcodes that participate in the stc/fsft and stc/ret hazards,
   and their printable names (indexed by op_num).  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* Classification (as an op_num) of the opcode most recently emitted.  */
static int prev_opcode = 0;
3757 /* This isn't as optimal as it could be, because we don't know what
3758 control register the STC opcode is storing in. We only need to add
3759 the nop if it's the relevant register, but we add it for irrelevant
3760 registers also. */
3762 void
3763 mep_asm_output_opcode (FILE *file, const char *ptr)
3765 int this_opcode = op_none;
3766 const char *hazard = 0;
3768 switch (*ptr)
3770 case 'f':
3771 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3772 this_opcode = op_fsft;
3773 break;
3774 case 'r':
3775 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3776 this_opcode = op_ret;
3777 break;
3778 case 's':
3779 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3780 this_opcode = op_stc;
3781 break;
3784 if (prev_opcode == op_stc && this_opcode == op_fsft)
3785 hazard = "nop";
3786 if (prev_opcode == op_stc && this_opcode == op_ret)
3787 hazard = "nop";
3789 if (hazard)
3790 fprintf(file, "%s\t# %s-%s hazard\n\t",
3791 hazard, opnames[prev_opcode], opnames[this_opcode]);
3793 prev_opcode = this_opcode;
3796 /* Handle attributes. */
3798 static tree
3799 mep_validate_based_tiny (tree *node, tree name, tree args,
3800 int flags ATTRIBUTE_UNUSED, bool *no_add)
3802 if (TREE_CODE (*node) != VAR_DECL
3803 && TREE_CODE (*node) != POINTER_TYPE
3804 && TREE_CODE (*node) != TYPE_DECL)
3806 warning (0, "%qE attribute only applies to variables", name);
3807 *no_add = true;
3809 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3811 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3813 warning (0, "address region attributes not allowed with auto storage class");
3814 *no_add = true;
3816 /* Ignore storage attribute of pointed to variable: char __far * x; */
3817 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3819 warning (0, "address region attributes on pointed-to types ignored");
3820 *no_add = true;
3824 return NULL_TREE;
3827 static int
3828 mep_multiple_address_regions (tree list, bool check_section_attr)
3830 tree a;
3831 int count_sections = 0;
3832 int section_attr_count = 0;
3834 for (a = list; a; a = TREE_CHAIN (a))
3836 if (is_attribute_p ("based", TREE_PURPOSE (a))
3837 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3838 || is_attribute_p ("near", TREE_PURPOSE (a))
3839 || is_attribute_p ("far", TREE_PURPOSE (a))
3840 || is_attribute_p ("io", TREE_PURPOSE (a)))
3841 count_sections ++;
3842 if (check_section_attr)
3843 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3846 if (check_section_attr)
3847 return section_attr_count;
3848 else
3849 return count_sections;
/* The attribute list relevant to DECL: type attributes for types,
   otherwise the decl's own attributes, falling back to its type's.
   The whole expansion is now parenthesized so the conditional
   expression cannot rebind to neighboring operators at a use site
   (standard function-like-macro hygiene).  */
#define MEP_ATTRIBUTES(decl) \
  ((TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
   : DECL_ATTRIBUTES (decl) \
     ? (DECL_ATTRIBUTES (decl)) \
     : TYPE_ATTRIBUTES (TREE_TYPE (decl)))
3858 static tree
3859 mep_validate_near_far (tree *node, tree name, tree args,
3860 int flags ATTRIBUTE_UNUSED, bool *no_add)
3862 if (TREE_CODE (*node) != VAR_DECL
3863 && TREE_CODE (*node) != FUNCTION_DECL
3864 && TREE_CODE (*node) != METHOD_TYPE
3865 && TREE_CODE (*node) != POINTER_TYPE
3866 && TREE_CODE (*node) != TYPE_DECL)
3868 warning (0, "%qE attribute only applies to variables and functions",
3869 name);
3870 *no_add = true;
3872 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3874 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3876 warning (0, "address region attributes not allowed with auto storage class");
3877 *no_add = true;
3879 /* Ignore storage attribute of pointed to variable: char __far * x; */
3880 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3882 warning (0, "address region attributes on pointed-to types ignored");
3883 *no_add = true;
3886 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
3888 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3889 name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
3890 DECL_ATTRIBUTES (*node) = NULL_TREE;
3892 return NULL_TREE;
3895 static tree
3896 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3897 int flags ATTRIBUTE_UNUSED, bool *no_add)
3899 if (TREE_CODE (*node) != FUNCTION_DECL
3900 && TREE_CODE (*node) != METHOD_TYPE)
3902 warning (0, "%qE attribute only applies to functions", name);
3903 *no_add = true;
3905 return NULL_TREE;
3908 static tree
3909 mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3910 int flags ATTRIBUTE_UNUSED, bool *no_add)
3912 tree function_type;
3914 if (TREE_CODE (*node) != FUNCTION_DECL)
3916 warning (0, "%qE attribute only applies to functions", name);
3917 *no_add = true;
3918 return NULL_TREE;
3921 if (DECL_DECLARED_INLINE_P (*node))
3922 error ("cannot inline interrupt function %qE", DECL_NAME (*node));
3923 DECL_UNINLINABLE (*node) = 1;
3925 function_type = TREE_TYPE (*node);
3927 if (TREE_TYPE (function_type) != void_type_node)
3928 error ("interrupt function must have return type of void");
3930 if (prototype_p (function_type)
3931 && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
3932 || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
3933 error ("interrupt function must have no arguments");
3935 return NULL_TREE;
3938 static tree
3939 mep_validate_io_cb (tree *node, tree name, tree args,
3940 int flags ATTRIBUTE_UNUSED, bool *no_add)
3942 if (TREE_CODE (*node) != VAR_DECL)
3944 warning (0, "%qE attribute only applies to variables", name);
3945 *no_add = true;
3948 if (args != NULL_TREE)
3950 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
3951 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
3952 if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
3954 warning (0, "%qE attribute allows only an integer constant argument",
3955 name);
3956 *no_add = true;
3960 if (*no_add == false && !TARGET_IO_NO_VOLATILE)
3961 TREE_THIS_VOLATILE (*node) = 1;
3963 return NULL_TREE;
3966 static tree
3967 mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3968 int flags ATTRIBUTE_UNUSED, bool *no_add)
3970 if (TREE_CODE (*node) != FUNCTION_TYPE
3971 && TREE_CODE (*node) != FUNCTION_DECL
3972 && TREE_CODE (*node) != METHOD_TYPE
3973 && TREE_CODE (*node) != FIELD_DECL
3974 && TREE_CODE (*node) != TYPE_DECL)
3976 static int gave_pointer_note = 0;
3977 static int gave_array_note = 0;
3978 static const char * given_type = NULL;
3980 given_type = get_tree_code_name (TREE_CODE (*node));
3981 if (TREE_CODE (*node) == POINTER_TYPE)
3982 given_type = "pointers";
3983 if (TREE_CODE (*node) == ARRAY_TYPE)
3984 given_type = "arrays";
3986 if (given_type)
3987 warning (0, "%qE attribute only applies to functions, not %s",
3988 name, given_type);
3989 else
3990 warning (0, "%qE attribute only applies to functions",
3991 name);
3992 *no_add = true;
3994 if (TREE_CODE (*node) == POINTER_TYPE
3995 && !gave_pointer_note)
3997 inform (input_location,
3998 "to describe a pointer to a VLIW function, use syntax like this:\n%s",
3999 " typedef int (__vliw *vfuncptr) ();");
4000 gave_pointer_note = 1;
4003 if (TREE_CODE (*node) == ARRAY_TYPE
4004 && !gave_array_note)
4006 inform (input_location,
4007 "to describe an array of VLIW function pointers, use syntax like this:\n%s",
4008 " typedef int (__vliw *vfuncptr[]) ();");
4009 gave_array_note = 1;
4012 if (!TARGET_VLIW)
4013 error ("VLIW functions are not allowed without a VLIW configuration");
4014 return NULL_TREE;
4017 static const struct attribute_spec mep_attribute_table[11] =
4019 /* name min max decl type func handler
4020 affects_type_identity */
4021 { "based", 0, 0, false, false, false, mep_validate_based_tiny, false },
4022 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny, false },
4023 { "near", 0, 0, false, false, false, mep_validate_near_far, false },
4024 { "far", 0, 0, false, false, false, mep_validate_near_far, false },
4025 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt,
4026 false },
4027 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt, false },
4028 { "io", 0, 1, false, false, false, mep_validate_io_cb, false },
4029 { "cb", 0, 1, false, false, false, mep_validate_io_cb, false },
4030 { "vliw", 0, 0, false, true, false, mep_validate_vliw, false },
4031 { NULL, 0, 0, false, false, false, NULL, false }
4034 static bool
4035 mep_function_attribute_inlinable_p (const_tree callee)
4037 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4038 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4039 return (lookup_attribute ("disinterrupt", attrs) == 0
4040 && lookup_attribute ("interrupt", attrs) == 0);
4043 static bool
4044 mep_can_inline_p (tree caller, tree callee)
4046 if (TREE_CODE (callee) == ADDR_EXPR)
4047 callee = TREE_OPERAND (callee, 0);
4049 if (!mep_vliw_function_p (caller)
4050 && mep_vliw_function_p (callee))
4052 return false;
4054 return true;
4057 #define FUNC_CALL 1
4058 #define FUNC_DISINTERRUPT 2
4061 struct GTY(()) pragma_entry {
4062 int used;
4063 int flag;
4064 const char *funcname;
4066 typedef struct pragma_entry pragma_entry;
4068 /* Hash table of farcall-tagged sections. */
4069 static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4071 static int
4072 pragma_entry_eq (const void *p1, const void *p2)
4074 const pragma_entry *old = (const pragma_entry *) p1;
4075 const char *new_name = (const char *) p2;
4077 return strcmp (old->funcname, new_name) == 0;
4080 static hashval_t
4081 pragma_entry_hash (const void *p)
4083 const pragma_entry *old = (const pragma_entry *) p;
4084 return htab_hash_string (old->funcname);
4087 static void
4088 mep_note_pragma_flag (const char *funcname, int flag)
4090 pragma_entry **slot;
4092 if (!pragma_htab)
4093 pragma_htab = htab_create_ggc (31, pragma_entry_hash,
4094 pragma_entry_eq, NULL);
4096 slot = (pragma_entry **)
4097 htab_find_slot_with_hash (pragma_htab, funcname,
4098 htab_hash_string (funcname), INSERT);
4100 if (!*slot)
4102 *slot = ggc_alloc<pragma_entry> ();
4103 (*slot)->flag = 0;
4104 (*slot)->used = 0;
4105 (*slot)->funcname = ggc_strdup (funcname);
4107 (*slot)->flag |= flag;
4110 static bool
4111 mep_lookup_pragma_flag (const char *funcname, int flag)
4113 pragma_entry **slot;
4115 if (!pragma_htab)
4116 return false;
4118 if (funcname[0] == '@' && funcname[2] == '.')
4119 funcname += 3;
4121 slot = (pragma_entry **)
4122 htab_find_slot_with_hash (pragma_htab, funcname,
4123 htab_hash_string (funcname), NO_INSERT);
4124 if (slot && *slot && ((*slot)->flag & flag))
4126 (*slot)->used |= flag;
4127 return true;
4129 return false;
4132 bool
4133 mep_lookup_pragma_call (const char *funcname)
4135 return mep_lookup_pragma_flag (funcname, FUNC_CALL);
4138 void
4139 mep_note_pragma_call (const char *funcname)
4141 mep_note_pragma_flag (funcname, FUNC_CALL);
4144 bool
4145 mep_lookup_pragma_disinterrupt (const char *funcname)
4147 return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
4150 void
4151 mep_note_pragma_disinterrupt (const char *funcname)
4153 mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
4156 static int
4157 note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
4159 const pragma_entry *d = (const pragma_entry *)(*slot);
4161 if ((d->flag & FUNC_DISINTERRUPT)
4162 && !(d->used & FUNC_DISINTERRUPT))
4163 warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
4164 return 1;
4167 void
4168 mep_file_cleanups (void)
4170 if (pragma_htab)
4171 htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
4174 /* These three functions provide a bridge between the pramgas that
4175 affect register classes, and the functions that maintain them. We
4176 can't call those functions directly as pragma handling is part of
4177 the front end and doesn't have direct access to them. */
4179 void
4180 mep_save_register_info (void)
4182 save_register_info ();
4185 void
4186 mep_reinit_regs (void)
4188 reinit_regs ();
4191 void
4192 mep_init_regs (void)
4194 init_regs ();
4199 static int
4200 mep_attrlist_to_encoding (tree list, tree decl)
4202 if (mep_multiple_address_regions (list, false) > 1)
4204 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4205 TREE_PURPOSE (TREE_CHAIN (list)),
4206 DECL_NAME (decl),
4207 DECL_SOURCE_LINE (decl));
4208 TREE_CHAIN (list) = NULL_TREE;
4211 while (list)
4213 if (is_attribute_p ("based", TREE_PURPOSE (list)))
4214 return 'b';
4215 if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
4216 return 't';
4217 if (is_attribute_p ("near", TREE_PURPOSE (list)))
4218 return 'n';
4219 if (is_attribute_p ("far", TREE_PURPOSE (list)))
4220 return 'f';
4221 if (is_attribute_p ("io", TREE_PURPOSE (list)))
4223 if (TREE_VALUE (list)
4224 && TREE_VALUE (TREE_VALUE (list))
4225 && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
4227 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
4228 if (location >= 0
4229 && location <= 0x1000000)
4230 return 'i';
4232 return 'I';
4234 if (is_attribute_p ("cb", TREE_PURPOSE (list)))
4235 return 'c';
4236 list = TREE_CHAIN (list);
4238 if (TARGET_TF
4239 && TREE_CODE (decl) == FUNCTION_DECL
4240 && DECL_SECTION_NAME (decl) == 0)
4241 return 'f';
4242 return 0;
4245 static int
4246 mep_comp_type_attributes (const_tree t1, const_tree t2)
4248 int vliw1, vliw2;
4250 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4251 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4253 if (vliw1 != vliw2)
4254 return 0;
4256 return 1;
4259 static void
4260 mep_insert_attributes (tree decl, tree *attributes)
4262 int size;
4263 const char *secname = 0;
4264 tree attrib, attrlist;
4265 char encoding;
4267 if (TREE_CODE (decl) == FUNCTION_DECL)
4269 const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));
4271 if (mep_lookup_pragma_disinterrupt (funcname))
4273 attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
4274 *attributes = chainon (*attributes, attrib);
4278 if (TREE_CODE (decl) != VAR_DECL
4279 || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
4280 return;
4282 if (TREE_READONLY (decl) && TARGET_DC)
4283 /* -mdc means that const variables default to the near section,
4284 regardless of the size cutoff. */
4285 return;
4287 /* User specified an attribute, so override the default.
4288 Ignore storage attribute of pointed to variable. char __far * x; */
4289 if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
4291 if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
4292 TYPE_ATTRIBUTES (decl) = NULL_TREE;
4293 else if (DECL_ATTRIBUTES (decl) && *attributes)
4294 DECL_ATTRIBUTES (decl) = NULL_TREE;
4297 attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
4298 encoding = mep_attrlist_to_encoding (attrlist, decl);
4299 if (!encoding && TYPE_P (TREE_TYPE (decl)))
4301 attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
4302 encoding = mep_attrlist_to_encoding (attrlist, decl);
4304 if (encoding)
4306 /* This means that the declaration has a specific section
4307 attribute, so we should not apply the default rules. */
4309 if (encoding == 'i' || encoding == 'I')
4311 tree attr = lookup_attribute ("io", attrlist);
4312 if (attr
4313 && TREE_VALUE (attr)
4314 && TREE_VALUE (TREE_VALUE(attr)))
4316 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4317 static tree previous_value = 0;
4318 static int previous_location = 0;
4319 static tree previous_name = 0;
4321 /* We take advantage of the fact that gcc will reuse the
4322 same tree pointer when applying an attribute to a
4323 list of decls, but produce a new tree for attributes
4324 on separate source lines, even when they're textually
4325 identical. This is the behavior we want. */
4326 if (TREE_VALUE (attr) == previous_value
4327 && location == previous_location)
4329 warning(0, "__io address 0x%x is the same for %qE and %qE",
4330 location, previous_name, DECL_NAME (decl));
4332 previous_name = DECL_NAME (decl);
4333 previous_location = location;
4334 previous_value = TREE_VALUE (attr);
4337 return;
4341 /* Declarations of arrays can change size. Don't trust them. */
4342 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
4343 size = 0;
4344 else
4345 size = int_size_in_bytes (TREE_TYPE (decl));
4347 if (TARGET_RAND_TPGP && size <= 4 && size > 0)
4349 if (TREE_PUBLIC (decl)
4350 || DECL_EXTERNAL (decl)
4351 || TREE_STATIC (decl))
4353 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
4354 int key = 0;
4356 while (*name)
4357 key += *name++;
4359 switch (key & 3)
4361 case 0:
4362 secname = "based";
4363 break;
4364 case 1:
4365 secname = "tiny";
4366 break;
4367 case 2:
4368 secname = "far";
4369 break;
4370 default:
4375 else
4377 if (size <= mep_based_cutoff && size > 0)
4378 secname = "based";
4379 else if (size <= mep_tiny_cutoff && size > 0)
4380 secname = "tiny";
4381 else if (TARGET_L)
4382 secname = "far";
4385 if (mep_const_section && TREE_READONLY (decl))
4387 if (strcmp (mep_const_section, "tiny") == 0)
4388 secname = "tiny";
4389 else if (strcmp (mep_const_section, "near") == 0)
4390 return;
4391 else if (strcmp (mep_const_section, "far") == 0)
4392 secname = "far";
4395 if (!secname)
4396 return;
4398 if (!mep_multiple_address_regions (*attributes, true)
4399 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
4401 attrib = build_tree_list (get_identifier (secname), NULL_TREE);
4403 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4404 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4405 and mep_validate_based_tiny. */
4406 DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
4410 static void
4411 mep_encode_section_info (tree decl, rtx rtl, int first)
4413 rtx rtlname;
4414 const char *oldname;
4415 const char *secname;
4416 char encoding;
4417 char *newname;
4418 tree idp;
4419 int maxsize;
4420 tree type;
4421 tree mep_attributes;
4423 if (! first)
4424 return;
4426 if (TREE_CODE (decl) != VAR_DECL
4427 && TREE_CODE (decl) != FUNCTION_DECL)
4428 return;
4430 rtlname = XEXP (rtl, 0);
4431 if (GET_CODE (rtlname) == SYMBOL_REF)
4432 oldname = XSTR (rtlname, 0);
4433 else if (GET_CODE (rtlname) == MEM
4434 && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
4435 oldname = XSTR (XEXP (rtlname, 0), 0);
4436 else
4437 gcc_unreachable ();
4439 type = TREE_TYPE (decl);
4440 if (type == error_mark_node)
4441 return;
4442 mep_attributes = MEP_ATTRIBUTES (decl);
4444 encoding = mep_attrlist_to_encoding (mep_attributes, decl);
4446 if (encoding)
4448 newname = (char *) alloca (strlen (oldname) + 4);
4449 sprintf (newname, "@%c.%s", encoding, oldname);
4450 idp = get_identifier (newname);
4451 XEXP (rtl, 0) =
4452 gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
4453 SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
4454 SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);
4456 switch (encoding)
4458 case 'b':
4459 maxsize = 128;
4460 secname = "based";
4461 break;
4462 case 't':
4463 maxsize = 65536;
4464 secname = "tiny";
4465 break;
4466 case 'n':
4467 maxsize = 0x1000000;
4468 secname = "near";
4469 break;
4470 default:
4471 maxsize = 0;
4472 secname = 0;
4473 break;
4475 if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
4477 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4478 oldname,
4479 (long) int_size_in_bytes (TREE_TYPE (decl)),
4480 secname,
4481 maxsize);
/* Implement TARGET_STRIP_NAME_ENCODING: drop any leading '*' markers
   and "@<letter>." region prefixes from SYM and return the bare
   name.  */

const char *
mep_strip_name_encoding (const char *sym)
{
  for (;;)
    {
      if (sym[0] == '*')
	sym += 1;
      else if (sym[0] == '@' && sym[2] == '.')
	sym += 3;
      else
	return sym;
    }
}
4500 static section *
4501 mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
4502 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
4504 int readonly = 1;
4505 int encoding;
4507 switch (TREE_CODE (decl))
4509 case VAR_DECL:
4510 if (!TREE_READONLY (decl)
4511 || TREE_SIDE_EFFECTS (decl)
4512 || !DECL_INITIAL (decl)
4513 || (DECL_INITIAL (decl) != error_mark_node
4514 && !TREE_CONSTANT (DECL_INITIAL (decl))))
4515 readonly = 0;
4516 break;
4517 case CONSTRUCTOR:
4518 if (! TREE_CONSTANT (decl))
4519 readonly = 0;
4520 break;
4522 default:
4523 break;
4526 if (TREE_CODE (decl) == FUNCTION_DECL)
4528 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4530 if (name[0] == '@' && name[2] == '.')
4531 encoding = name[1];
4532 else
4533 encoding = 0;
4535 if (flag_function_sections || DECL_COMDAT_GROUP (decl))
4536 mep_unique_section (decl, 0);
4537 else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4539 if (encoding == 'f')
4540 return vftext_section;
4541 else
4542 return vtext_section;
4544 else if (encoding == 'f')
4545 return ftext_section;
4546 else
4547 return text_section;
4550 if (TREE_CODE (decl) == VAR_DECL)
4552 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4554 if (name[0] == '@' && name[2] == '.')
4555 switch (name[1])
4557 case 'b':
4558 return based_section;
4560 case 't':
4561 if (readonly)
4562 return srodata_section;
4563 if (DECL_INITIAL (decl))
4564 return sdata_section;
4565 return tinybss_section;
4567 case 'f':
4568 if (readonly)
4569 return frodata_section;
4570 return far_section;
4572 case 'i':
4573 case 'I':
4574 error_at (DECL_SOURCE_LOCATION (decl),
4575 "variable %D of type %<io%> must be uninitialized", decl);
4576 return data_section;
4578 case 'c':
4579 error_at (DECL_SOURCE_LOCATION (decl),
4580 "variable %D of type %<cb%> must be uninitialized", decl);
4581 return data_section;
4585 if (readonly)
4586 return readonly_data_section;
4588 return data_section;
4591 static void
4592 mep_unique_section (tree decl, int reloc)
4594 static const char *prefixes[][2] =
4596 { ".text.", ".gnu.linkonce.t." },
4597 { ".rodata.", ".gnu.linkonce.r." },
4598 { ".data.", ".gnu.linkonce.d." },
4599 { ".based.", ".gnu.linkonce.based." },
4600 { ".sdata.", ".gnu.linkonce.s." },
4601 { ".far.", ".gnu.linkonce.far." },
4602 { ".ftext.", ".gnu.linkonce.ft." },
4603 { ".frodata.", ".gnu.linkonce.frd." },
4604 { ".srodata.", ".gnu.linkonce.srd." },
4605 { ".vtext.", ".gnu.linkonce.v." },
4606 { ".vftext.", ".gnu.linkonce.vf." }
4608 int sec = 2; /* .data */
4609 int len;
4610 const char *name, *prefix;
4611 char *string;
4613 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
4614 if (DECL_RTL (decl))
4615 name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4617 if (TREE_CODE (decl) == FUNCTION_DECL)
4619 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4620 sec = 9; /* .vtext */
4621 else
4622 sec = 0; /* .text */
4624 else if (decl_readonly_section (decl, reloc))
4625 sec = 1; /* .rodata */
4627 if (name[0] == '@' && name[2] == '.')
4629 switch (name[1])
4631 case 'b':
4632 sec = 3; /* .based */
4633 break;
4634 case 't':
4635 if (sec == 1)
4636 sec = 8; /* .srodata */
4637 else
4638 sec = 4; /* .sdata */
4639 break;
4640 case 'f':
4641 if (sec == 0)
4642 sec = 6; /* .ftext */
4643 else if (sec == 9)
4644 sec = 10; /* .vftext */
4645 else if (sec == 1)
4646 sec = 7; /* .frodata */
4647 else
4648 sec = 5; /* .far. */
4649 break;
4651 name += 3;
4654 prefix = prefixes[sec][DECL_COMDAT_GROUP(decl) != NULL];
4655 len = strlen (name) + strlen (prefix);
4656 string = (char *) alloca (len + 1);
4658 sprintf (string, "%s%s", prefix, name);
4660 set_decl_section_name (decl, string);
4663 /* Given a decl, a section name, and whether the decl initializer
4664 has relocs, choose attributes for the section. */
4666 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4668 static unsigned int
4669 mep_section_type_flags (tree decl, const char *name, int reloc)
4671 unsigned int flags = default_section_type_flags (decl, name, reloc);
4673 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4674 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4675 flags |= SECTION_MEP_VLIW;
4677 return flags;
4680 /* Switch to an arbitrary section NAME with attributes as specified
4681 by FLAGS. ALIGN specifies any known alignment requirements for
4682 the section; 0 if the default should be used.
4684 Differs from the standard ELF version only in support of VLIW mode. */
4686 static void
4687 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4689 char flagchars[8], *f = flagchars;
4690 const char *type;
4692 if (!(flags & SECTION_DEBUG))
4693 *f++ = 'a';
4694 if (flags & SECTION_WRITE)
4695 *f++ = 'w';
4696 if (flags & SECTION_CODE)
4697 *f++ = 'x';
4698 if (flags & SECTION_SMALL)
4699 *f++ = 's';
4700 if (flags & SECTION_MEP_VLIW)
4701 *f++ = 'v';
4702 *f = '\0';
4704 if (flags & SECTION_BSS)
4705 type = "nobits";
4706 else
4707 type = "progbits";
4709 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4710 name, flagchars, type);
4712 if (flags & SECTION_CODE)
4713 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4714 asm_out_file);
4717 void
4718 mep_output_aligned_common (FILE *stream, tree decl, const char *name,
4719 int size, int align, int global)
4721 /* We intentionally don't use mep_section_tag() here. */
4722 if (name[0] == '@'
4723 && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
4724 && name[2] == '.')
4726 int location = -1;
4727 tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
4728 DECL_ATTRIBUTES (decl));
4729 if (attr
4730 && TREE_VALUE (attr)
4731 && TREE_VALUE (TREE_VALUE(attr)))
4732 location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4733 if (location == -1)
4734 return;
4735 if (global)
4737 fprintf (stream, "\t.globl\t");
4738 assemble_name (stream, name);
4739 fprintf (stream, "\n");
4741 assemble_name (stream, name);
4742 fprintf (stream, " = %d\n", location);
4743 return;
4745 if (name[0] == '@' && name[2] == '.')
4747 const char *sec = 0;
4748 switch (name[1])
4750 case 'b':
4751 switch_to_section (based_section);
4752 sec = ".based";
4753 break;
4754 case 't':
4755 switch_to_section (tinybss_section);
4756 sec = ".sbss";
4757 break;
4758 case 'f':
4759 switch_to_section (farbss_section);
4760 sec = ".farbss";
4761 break;
4763 if (sec)
4765 const char *name2;
4766 int p2align = 0;
4768 while (align > BITS_PER_UNIT)
4770 align /= 2;
4771 p2align ++;
4773 name2 = targetm.strip_name_encoding (name);
4774 if (global)
4775 fprintf (stream, "\t.globl\t%s\n", name2);
4776 fprintf (stream, "\t.p2align %d\n", p2align);
4777 fprintf (stream, "\t.type\t%s,@object\n", name2);
4778 fprintf (stream, "\t.size\t%s,%d\n", name2, size);
4779 fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
4780 return;
4784 if (!global)
4786 fprintf (stream, "\t.local\t");
4787 assemble_name (stream, name);
4788 fprintf (stream, "\n");
4790 fprintf (stream, "\t.comm\t");
4791 assemble_name (stream, name);
4792 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
4795 /* Trampolines. */
4797 static void
4798 mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
4800 rtx addr = XEXP (m_tramp, 0);
4801 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
4803 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4804 LCT_NORMAL, VOIDmode, 3,
4805 addr, Pmode,
4806 fnaddr, Pmode,
4807 static_chain, Pmode);
4810 /* Experimental Reorg. */
/* Return true if IN mentions REG (when REG is non-NULL) or any MEM
   (when REG is NULL).  If MODES_TOO is nonzero, a REG only counts as
   a match when its mode equals REG's mode.  Recurses through the
   whole rtx structure of IN.  */

static bool
mep_mentioned_p (rtx in,
		 rtx reg, /* NULL for mem */
		 int modes_too) /* if nonzero, modes must match also.  */
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return false;
  if (reg && GET_CODE (reg) != REG)
    return false;

  /* A LABEL_REF can only "mention" a MEM, never a register.  */
  if (GET_CODE (in) == LABEL_REF)
    return (reg == 0);

  code = GET_CODE (in);

  switch (code)
    {
    case MEM:
      /* When looking for a register, search inside the address;
	 when looking for any MEM, this is it.  */
      if (reg)
	return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
      return true;

    case REG:
      if (!reg)
	return false;
      if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
	return false;
      return (REGNO (in) == REGNO (reg));

    case SCRATCH:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
      /* Leaf codes that can never contain a REG or a MEM.  */
      return false;

    default:
      break;
    }

  /* Set's source should be read-only.  */
  if (code == SET && !reg)
    return mep_mentioned_p (SET_DEST (in), reg, modes_too);

  /* Generic recursion over all sub-rtxes and rtx vectors.  */
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	    if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
	      return true;
	}
      else if (fmt[i] == 'e'
	       && mep_mentioned_p (XEXP (in, i), reg, modes_too))
	return true;
    }
  return false;
}
4878 #define EXPERIMENTAL_REGMOVE_REORG 1
4880 #if EXPERIMENTAL_REGMOVE_REORG
/* Return nonzero when hard registers R1 and R2 live in the same
   register bank (both core registers or both coprocessor registers),
   so that a move between them could be elided by the regmove pass.  */

static int
mep_compatible_reg_class (int r1, int r2)
{
  return ((GR_REGNO_P (r1) && GR_REGNO_P (r2))
	  || (CR_REGNO_P (r1) && CR_REGNO_P (r2)));
}
4892 static void
4893 mep_reorg_regmove (rtx_insn *insns)
4895 rtx_insn *insn, *next, *follow;
4896 rtx pat, *where;
4897 int count = 0, done = 0, replace, before = 0;
4899 if (dump_file)
4900 for (insn = insns; insn; insn = NEXT_INSN (insn))
4901 if (NONJUMP_INSN_P (insn))
4902 before++;
4904 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4905 set that uses the r2 and r2 dies there. We replace r2 with r1
4906 and see if it's still a valid insn. If so, delete the first set.
4907 Copied from reorg.c. */
4909 while (!done)
4911 done = 1;
4912 for (insn = insns; insn; insn = next)
4914 next = next_nonnote_nondebug_insn (insn);
4915 if (! NONJUMP_INSN_P (insn))
4916 continue;
4917 pat = PATTERN (insn);
4919 replace = 0;
4921 if (GET_CODE (pat) == SET
4922 && GET_CODE (SET_SRC (pat)) == REG
4923 && GET_CODE (SET_DEST (pat)) == REG
4924 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4925 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4927 follow = next_nonnote_nondebug_insn (insn);
4928 if (dump_file)
4929 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
4931 while (follow && NONJUMP_INSN_P (follow)
4932 && GET_CODE (PATTERN (follow)) == SET
4933 && !dead_or_set_p (follow, SET_SRC (pat))
4934 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
4935 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
4937 if (dump_file)
4938 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
4939 follow = next_nonnote_insn (follow);
4942 if (dump_file)
4943 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
4944 if (follow && NONJUMP_INSN_P (follow)
4945 && GET_CODE (PATTERN (follow)) == SET
4946 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
4948 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
4950 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
4952 replace = 1;
4953 where = & SET_SRC (PATTERN (follow));
4956 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
4958 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
4960 replace = 1;
4961 where = & PATTERN (follow);
4967 /* If so, follow is the corresponding insn */
4968 if (replace)
4970 if (dump_file)
4972 rtx_insn *x;
4974 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
4975 for (x = insn; x ;x = NEXT_INSN (x))
4977 print_rtl_single (dump_file, x);
4978 if (x == follow)
4979 break;
4980 fprintf (dump_file, "\n");
4984 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
4985 follow, where))
4987 count ++;
4988 delete_insn (insn);
4989 if (dump_file)
4991 fprintf (dump_file, "\n----- Success! new insn:\n\n");
4992 print_rtl_single (dump_file, follow);
4994 done = 0;
5000 if (dump_file)
5002 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5003 fprintf (dump_file, "=====\n");
5006 #endif
5009 /* Figure out where to put LABEL, which is the label for a repeat loop.
5010 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5011 the loop ends just before LAST_INSN. If SHARED, insns other than the
5012 "repeat" might use LABEL to jump to the loop's continuation point.
5014 Return the last instruction in the adjusted loop. */
static rtx_insn *
mep_insert_repeat_label_last (rtx_insn *last_insn, rtx_code_label *label,
			      bool including, bool shared)
{
  rtx_insn *next, *prev;
  int count = 0, code, icode;

  if (dump_file)
    fprintf (dump_file, "considering end of repeat loop at insn %d\n",
	     INSN_UID (last_insn));

  /* Set PREV to the last insn in the loop.  */
  prev = last_insn;
  if (!including)
    prev = PREV_INSN (prev);

  /* Set NEXT to the next insn after the repeat label.  */
  next = last_insn;
  if (!shared)
    /* Try to pull up to two trailing insns of the loop body into the
       repeat epilogue (the hardware executes the final two opcodes as
       part of the repeat block).  */
    while (prev != 0)
      {
	code = GET_CODE (prev);
	if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
	  break;

	if (INSN_P (prev))
	  {
	    /* For a bundled pair, inspect the second insn of the
	       SEQUENCE.  */
	    if (GET_CODE (PATTERN (prev)) == SEQUENCE)
	      prev = as_a <rtx_insn *> (XVECEXP (PATTERN (prev), 0, 1));

	    /* Other insns that should not be in the last two opcodes.  */
	    icode = recog_memoized (prev);
	    if (icode < 0
		|| icode == CODE_FOR_repeat
		|| icode == CODE_FOR_erepeat
		|| get_attr_may_trap (prev) == MAY_TRAP_YES)
	      break;

	    /* That leaves JUMP_INSN and INSN.  It will have BImode if it
	       is the second instruction in a VLIW bundle.  In that case,
	       loop again: if the first instruction also satisfies the
	       conditions above then we will reach here again and put
	       both of them into the repeat epilogue.  Otherwise both
	       should remain outside.  */
	    if (GET_MODE (prev) != BImode)
	      {
		count++;
		next = prev;
		if (dump_file)
		  print_rtl_single (dump_file, next);
		if (count == 2)
		  break;
	      }
	  }
	prev = PREV_INSN (prev);
      }

  /* See if we're adding the label immediately after the repeat insn.
     If so, we need to separate them with a nop.  */
  prev = prev_real_insn (next);
  if (prev)
    switch (recog_memoized (prev))
      {
      case CODE_FOR_repeat:
      case CODE_FOR_erepeat:
	if (dump_file)
	  fprintf (dump_file, "Adding nop inside loop\n");
	emit_insn_before (gen_nop (), next);
	break;

      default:
	break;
      }

  /* Insert the label.  */
  emit_label_before (label, next);

  /* Insert the nops.  The repeat epilogue must contain exactly two
     opcodes; pad with nops for any we could not pull in above.  */
  if (dump_file && count < 2)
    fprintf (dump_file, "Adding %d nop%s\n\n",
	     2 - count, count == 1 ? "" : "s");
  for (; count < 2; count++)
    if (including)
      last_insn = emit_insn_after (gen_nop (), last_insn);
    else
      emit_insn_before (gen_nop (), last_insn);

  return last_insn;
}
/* Emit a doloop_begin (IS_END == 0) or doloop_end (IS_END != 0) for
   OPERANDS.  Each begin/end pair shares a numeric tag so that
   mep_reorg_repeat can later match them up; a new tag is allocated
   whenever two insns of the same kind would otherwise share one.  */

void
mep_emit_doloop (rtx *operands, int is_end)
{
  rtx tag;

  /* Allocate a new tag on the first call, or when this insn is of the
     same kind (begin/end) as the previous one; otherwise this insn
     pairs with the previous one and reuses its tag.  */
  if (cfun->machine->doloop_tags == 0
      || cfun->machine->doloop_tag_from_end == is_end)
    {
      cfun->machine->doloop_tags++;
      cfun->machine->doloop_tag_from_end = is_end;
    }

  tag = GEN_INT (cfun->machine->doloop_tags - 1);
  if (is_end)
    emit_jump_insn (gen_doloop_end_internal (operands[0], operands[1], tag));
  else
    emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
}
5128 /* Code for converting doloop_begins and doloop_ends into valid
5129 MeP instructions. A doloop_begin is just a placeholder:
5131 $count = unspec ($count)
5133 where $count is initially the number of iterations - 1.
5134 doloop_end has the form:
5136 if ($count-- == 0) goto label
5138 The counter variable is private to the doloop insns, nothing else
5139 relies on its value.
5141 There are three cases, in decreasing order of preference:
5143 1. A loop has exactly one doloop_begin and one doloop_end.
5144 The doloop_end branches to the first instruction after
5145 the doloop_begin.
5147 In this case we can replace the doloop_begin with a repeat
5148 instruction and remove the doloop_end. I.e.:
5150 $count1 = unspec ($count1)
5151 label:
5153 insn1
5154 insn2
5155 if ($count2-- == 0) goto label
5157 becomes:
5159 repeat $count1,repeat_label
5160 label:
5162 repeat_label:
5163 insn1
5164 insn2
5165 # end repeat
5167 2. As for (1), except there are several doloop_ends. One of them
5168 (call it X) falls through to a label L. All the others fall
5169 through to branches to L.
5171 In this case, we remove X and replace the other doloop_ends
5172 with branches to the repeat label. For example:
5174 $count1 = unspec ($count1)
5175 start:
5177 if ($count2-- == 0) goto label
5178 end:
5180 if ($count3-- == 0) goto label
5181 goto end
5183 becomes:
5185 repeat $count1,repeat_label
5186 start:
5188 repeat_label:
5191 # end repeat
5192 end:
5194 goto repeat_label
5196 3. The fallback case. Replace doloop_begins with:
5198 $count = $count + 1
5200 Replace doloop_ends with the equivalent of:
5202 $count = $count - 1
5203 if ($count == 0) goto label
5205 Note that this might need a scratch register if $count
5206 is stored in memory. */
/* A structure describing one doloop_begin.  */
struct mep_doloop_begin {
  /* The next doloop_begin with the same tag.  */
  struct mep_doloop_begin *next;

  /* The instruction itself.  */
  rtx_insn *insn;

  /* The initial counter value.  This is known to be a general register.  */
  rtx counter;
};
5220 /* A structure describing a doloop_end. */
5221 struct mep_doloop_end {
5222 /* The next doloop_end with the same loop tag. */
5223 struct mep_doloop_end *next;
5225 /* The instruction itself. */
5226 rtx_insn *insn;
5228 /* The first instruction after INSN when the branch isn't taken. */
5229 rtx_insn *fallthrough;
5231 /* The location of the counter value. Since doloop_end_internal is a
5232 jump instruction, it has to allow the counter to be stored anywhere
5233 (any non-fixed register or memory location). */
5234 rtx counter;
5236 /* The target label (the place where the insn branches when the counter
5237 isn't zero). */
5238 rtx label;
5240 /* A scratch register. Only available when COUNTER isn't stored
5241 in a general register. */
5242 rtx scratch;
/* One do-while loop.  */
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
5257 /* Return true if LOOP can be converted into repeat/repeat_end form
5258 (that is, if it matches cases (1) or (2) above). */
static bool
mep_repeat_loop_p (struct mep_doloop *loop)
{
  struct mep_doloop_end *end;
  rtx fallthrough;

  /* There must be exactly one doloop_begin and at least one doloop_end.  */
  if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
    return false;

  /* The first doloop_end (X) must branch back to the insn after
     the doloop_begin.  */
  if (prev_real_insn (loop->end->label) != loop->begin->insn)
    return false;

  /* All the other doloop_ends must branch to the same place as X.
     When the branch isn't taken, they must jump to the instruction
     after X.  */
  fallthrough = loop->end->fallthrough;
  for (end = loop->end->next; end != 0; end = end->next)
    if (end->label != loop->end->label
	|| !simplejump_p (end->fallthrough)
	|| next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
      return false;

  return true;
}
5289 /* The main repeat reorg function. See comment above for details. */
static void
mep_reorg_repeat (rtx_insn *insns)
{
  rtx_insn *insn;
  struct mep_doloop *loops, *loop;
  struct mep_doloop_begin *begin;
  struct mep_doloop_end *end;

  /* Quick exit if we haven't created any loops.  */
  if (cfun->machine->doloop_tags == 0)
    return;

  /* Create an array of mep_doloop structures, indexed by loop tag.  */
  loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
  memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

  /* Search the function for do-while insns and group them by loop tag.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      switch (recog_memoized (insn))
	{
	case CODE_FOR_doloop_begin_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
	  begin->next = loop->begin;
	  begin->insn = insn;
	  begin->counter = recog_data.operand[0];

	  loop->begin = begin;
	  break;

	case CODE_FOR_doloop_end_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
	  end->insn = insn;
	  end->fallthrough = next_real_insn (insn);
	  end->counter = recog_data.operand[0];
	  end->label = recog_data.operand[1];
	  end->scratch = recog_data.operand[3];

	  /* If this insn falls through to an unconditional jump,
	     give it a lower priority than the others.  */
	  if (loop->end != 0 && simplejump_p (end->fallthrough))
	    {
	      end->next = loop->end->next;
	      loop->end->next = end;
	    }
	  else
	    {
	      end->next = loop->end;
	      loop->end = end;
	    }
	  break;
	}

  /* Convert the insns for each loop in turn.  */
  for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
    if (mep_repeat_loop_p (loop))
      {
	/* Case (1) or (2).  */
	rtx_code_label *repeat_label;
	rtx label_ref;

	/* Create a new label for the repeat insn.  */
	repeat_label = gen_label_rtx ();

	/* Replace the doloop_begin with a repeat.  */
	label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
	emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
			  loop->begin->insn);
	delete_insn (loop->begin->insn);

	/* Insert the repeat label before the first doloop_end.
	   Fill the gap with nops if there are other doloop_ends.  */
	mep_insert_repeat_label_last (loop->end->insn, repeat_label,
				      false, loop->end->next != 0);

	/* Emit a repeat_end (to improve the readability of the output).  */
	emit_insn_before (gen_repeat_end (), loop->end->insn);

	/* Delete the first doloop_end.  */
	delete_insn (loop->end->insn);

	/* Replace the others with branches to REPEAT_LABEL.  */
	for (end = loop->end->next; end != 0; end = end->next)
	  {
	    emit_jump_insn_before (gen_jump (repeat_label), end->insn);
	    delete_insn (end->insn);
	    delete_insn (end->fallthrough);
	  }
      }
    else
      {
	/* Case (3).  First replace all the doloop_begins with increment
	   instructions.  */
	for (begin = loop->begin; begin != 0; begin = begin->next)
	  {
	    emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
					     begin->counter, const1_rtx),
			      begin->insn);
	    delete_insn (begin->insn);
	  }

	/* Replace all the doloop_ends with decrement-and-branch sequences.  */
	for (end = loop->end; end != 0; end = end->next)
	  {
	    rtx reg;

	    start_sequence ();

	    /* Load the counter value into a general register.  */
	    reg = end->counter;
	    if (!REG_P (reg) || REGNO (reg) > 15)
	      {
		reg = end->scratch;
		emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
	      }

	    /* Decrement the counter.  */
	    emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
				      constm1_rtx));

	    /* Copy it back to its original location.  */
	    if (reg != end->counter)
	      emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

	    /* Jump back to the start label.  */
	    insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
						     end->label));
	    JUMP_LABEL (insn) = end->label;
	    LABEL_NUSES (end->label)++;

	    /* Emit the whole sequence before the doloop_end.  */
	    insn = get_insns ();
	    end_sequence ();
	    emit_insn_before (insn, end->insn);

	    /* Delete the doloop_end.  */
	    delete_insn (end->insn);
	  }
      }
}
/* Return true if INSN is a conditional branch whose condition can be
   inverted (EQ<->NE, LT<->GE) such that the inverted insn still
   recognizes.  The test is performed by temporarily flipping the
   condition code in place, re-running recog, and restoring the
   original code before returning.  INSN's pattern is unchanged on
   exit, but its cached INSN_CODE is reset to -1.  */

static bool
mep_invertable_branch_p (rtx_insn *insn)
{
  rtx cond, set;
  enum rtx_code old_code;
  int i;

  set = PATTERN (insn);
  if (GET_CODE (set) != SET)
    return false;
  if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
    return false;
  cond = XEXP (XEXP (set, 1), 0);
  old_code = GET_CODE (cond);
  switch (old_code)
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      /* Other comparison codes are not invertable here.  */
      return false;
    }
  /* Force a fresh recognition of the modified pattern.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  /* Restore the original condition; this probe must not change INSN.  */
  PUT_CODE (cond, old_code);
  INSN_CODE (insn) = -1;
  return i >= 0;
}
/* Invert the sense of conditional branch INSN (EQ<->NE, LT<->GE) and
   retarget it at a fresh label emitted just after AFTER.  The old
   target label is deleted when INSN was its only user.  INSN must
   satisfy mep_invertable_branch_p.  */

static void
mep_invert_branch (rtx_insn *insn, rtx_insn *after)
{
  rtx cond, set, label;
  int i;

  set = PATTERN (insn);

  gcc_assert (GET_CODE (set) == SET);
  gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

  cond = XEXP (XEXP (set, 1), 0);
  switch (GET_CODE (cond))
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      gcc_unreachable ();
    }
  label = gen_label_rtx ();
  emit_label_after (label, after);
  /* Redirect whichever arm of the IF_THEN_ELSE holds the label.  */
  for (i=1; i<=2; i++)
    if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
      {
	rtx ref = XEXP (XEXP (set, 1), i);
	if (LABEL_NUSES (XEXP (ref, 0)) == 1)
	  delete_insn (XEXP (ref, 0));
	XEXP (ref, 0) = label;
	LABEL_NUSES (label) ++;
	JUMP_LABEL (insn) = label;
      }
  /* The modified insn must still recognize.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  gcc_assert (i >= 0);
}
/* Look for backward branches that close a loop with a safe (single
   user, or provably unreachable from inside) loop-top label, and
   convert the loop to use the "erepeat" hardware looping insn.  */

static void
mep_reorg_erepeat (rtx_insn *insns)
{
  rtx_insn *insn, *prev;
  rtx_code_label *l;
  rtx x;
  int count;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	&& mep_invertable_branch_p (insn))
      {
	if (dump_file)
	  {
	    fprintf (dump_file, "\n------------------------------\n");
	    fprintf (dump_file, "erepeat: considering this jump:\n");
	    print_rtl_single (dump_file, insn);
	  }
	count = simplejump_p (insn) ? 0 : 1;
	/* Walk backwards looking for the loop-top label this jump
	   targets, counting the insns in between.  */
	for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
	  {
	    if (CALL_P (prev) || BARRIER_P (prev))
	      break;

	    if (prev == JUMP_LABEL (insn))
	      {
		rtx_insn *newlast;
		if (dump_file)
		  fprintf (dump_file, "found loop top, %d insns\n", count);

		if (LABEL_NUSES (prev) == 1)
		  /* We're the only user, always safe */ ;
		else if (LABEL_NUSES (prev) == 2)
		  {
		    /* See if there's a barrier before this label.  If
		       so, we know nobody inside the loop uses it.
		       But we must be careful to put the erepeat
		       *after* the label.  */
		    rtx_insn *barrier;
		    for (barrier = PREV_INSN (prev);
			 barrier && NOTE_P (barrier);
			 barrier = PREV_INSN (barrier))
		      ;
		    if (barrier && ! BARRIER_P (barrier))
		      break;
		  }
		else
		  {
		    /* We don't know who else, within or without our loop, uses this */
		    if (dump_file)
		      fprintf (dump_file, "... but there are multiple users, too risky.\n");
		    break;
		  }

		/* Generate a label to be used by the erepeat insn.  */
		l = gen_label_rtx ();

		/* Insert the erepeat after INSN's target label.  */
		x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
		LABEL_NUSES (l)++;
		emit_insn_after (x, prev);

		/* Insert the erepeat label.  */
		newlast = (mep_insert_repeat_label_last
			   (insn, l, !simplejump_p (insn), false));
		if (simplejump_p (insn))
		  {
		    /* Unconditional loop branch: the erepeat replaces
		       it entirely.  */
		    emit_insn_before (gen_erepeat_end (), insn);
		    delete_insn (insn);
		  }
		else
		  {
		    /* Conditional branch: invert it so it exits the
		       loop, and let erepeat handle the back edge.  */
		    mep_invert_branch (insn, newlast);
		    emit_insn_after (gen_erepeat_end (), newlast);
		  }
		break;
	      }

	    if (LABEL_P (prev))
	      {
		/* A label is OK if there is exactly one user, and we
		   can find that user before the next label.  */
		rtx_insn *user = 0;
		int safe = 0;
		if (LABEL_NUSES (prev) == 1)
		  {
		    for (user = PREV_INSN (prev);
			 user && (INSN_P (user) || NOTE_P (user));
			 user = PREV_INSN (user))
		      if (JUMP_P (user) && JUMP_LABEL (user) == prev)
			{
			  safe = INSN_UID (user);
			  break;
			}
		  }
		if (!safe)
		  break;
		if (dump_file)
		  fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
			   safe, INSN_UID (prev));
	      }

	    if (INSN_P (prev))
	      {
		count ++;
	      }
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n==============================\n");
}
5635 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5636 always do this on its own. */
static void
mep_jmp_return_reorg (rtx_insn *insns)
{
  rtx_insn *insn, *label, *ret;
  int ret_code;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (simplejump_p (insn))
      {
	/* Find the first real insn the jump jumps to.  */
	label = ret = safe_as_a <rtx_insn *> (JUMP_LABEL (insn));
	while (ret
	       && (NOTE_P (ret)
		   || LABEL_P (ret)
		   || GET_CODE (PATTERN (ret)) == USE))
	  ret = NEXT_INSN (ret);

	if (ret)
	  {
	    /* Is it a return?  */
	    ret_code = recog_memoized (ret);
	    if (ret_code == CODE_FOR_return_internal
		|| ret_code == CODE_FOR_eh_return_internal)
	      {
		/* It is.  Replace the jump with a return.  */
		LABEL_NUSES (label) --;
		if (LABEL_NUSES (label) == 0)
		  delete_insn (label);
		PATTERN (insn) = copy_rtx (PATTERN (ret));
		INSN_CODE (insn) = -1;
	      }
	  }
      }
}
/* Combine adjacent "reg = reg + const" insns operating on the same
   register into a single add when the combined constant still fits
   in a signed 16-bit immediate.  */

static void
mep_reorg_addcombine (rtx_insn *insns)
{
  rtx_insn *i, *n;

  for (i = insns; i; i = NEXT_INSN (i))
    if (INSN_P (i)
	&& INSN_CODE (i) == CODE_FOR_addsi3
	&& GET_CODE (SET_DEST (PATTERN (i))) == REG
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
	&& REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
      {
	n = NEXT_INSN (i);
	if (INSN_P (n)
	    && INSN_CODE (n) == CODE_FOR_addsi3
	    && GET_CODE (SET_DEST (PATTERN (n))) == REG
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
	    && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
	  {
	    int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
	    int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
	    if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
		&& ic + nc < 32767
		&& ic + nc > -32768)
	      {
		/* Fold N's constant into I and unlink N from the insn
		   chain by hand (splicing NEXT/PREV pointers).  */
		XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
		SET_NEXT_INSN (i) = NEXT_INSN (n);
		if (NEXT_INSN (i))
		  SET_PREV_INSN (NEXT_INSN (i)) = i;
	      }
	  }
      }
}
5710 /* If this insn adjusts the stack, return the adjustment, else return
5711 zero. */
static int
add_sp_insn_p (rtx_insn *insn)
{
  rtx pat;

  /* Must be a single-set insn of the form $sp = $sp + const.  */
  if (! single_set (insn))
    return 0;
  pat = PATTERN (insn);
  if (GET_CODE (SET_DEST (pat)) != REG)
    return 0;
  if (REGNO (SET_DEST (pat)) != SP_REGNO)
    return 0;
  if (GET_CODE (SET_SRC (pat)) != PLUS)
    return 0;
  if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
    return 0;
  if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
    return 0;
  if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
    return 0;
  /* NOTE: an adjustment of exactly zero is indistinguishable from
     "not a stack adjustment" for callers of this function.  */
  return INTVAL (XEXP (SET_SRC (pat), 1));
}
5735 /* Check for trivial functions that set up an unneeded stack
5736 frame. */
static void
mep_reorg_noframe (rtx_insn *insns)
{
  rtx_insn *start_frame_insn;
  rtx_insn *end_frame_insn = 0;
  int sp_adjust, sp2;
  rtx sp;

  /* The first insn should be $sp = $sp + N */
  while (insns && ! INSN_P (insns))
    insns = NEXT_INSN (insns);
  if (!insns)
    return;

  sp_adjust = add_sp_insn_p (insns);
  if (sp_adjust == 0)
    return;

  start_frame_insn = insns;
  sp = SET_DEST (PATTERN (start_frame_insn));

  insns = next_real_insn (insns);

  /* Scan the body: the frame is removable only if $sp is adjusted
     back exactly once, and nothing in between mentions $sp or makes
     a call.  */
  while (insns)
    {
      rtx_insn *next = next_real_insn (insns);
      if (!next)
	break;

      sp2 = add_sp_insn_p (insns);
      if (sp2)
	{
	  if (end_frame_insn)
	    return;
	  end_frame_insn = insns;
	  if (sp2 != -sp_adjust)
	    return;
	}
      else if (mep_mentioned_p (insns, sp, 0))
	return;
      else if (CALL_P (insns))
	return;

      insns = next;
    }

  if (end_frame_insn)
    {
      delete_insn (start_frame_insn);
      delete_insn (end_frame_insn);
    }
}
/* Implement TARGET_MACHINE_DEPENDENT_REORG: run the MeP-specific
   late RTL cleanups in dependency order.  */

static void
mep_reorg (void)
{
  rtx_insn *insns = get_insns ();

  /* We require accurate REG_DEAD notes.  */
  compute_bb_for_insn ();
  df_note_add_problem ();
  df_analyze ();

  mep_reorg_addcombine (insns);
#if EXPERIMENTAL_REGMOVE_REORG
  /* VLIW packing has been done already, so we can't just delete things.  */
  if (!mep_vliw_function_p (cfun->decl))
    mep_reorg_regmove (insns);
#endif
  mep_jmp_return_reorg (insns);
  mep_bundle_insns (insns);
  mep_reorg_repeat (insns);
  /* erepeat uses the RPB register, so it is only safe when profiling
     is off and, in interrupt handlers, when RPB is saved/restored.  */
  if (optimize
      && !profile_flag
      && !profile_arc_flag
      && TARGET_OPT_REPEAT
      && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
    mep_reorg_erepeat (insns);

  /* This may delete *insns so make sure it's last.  */
  mep_reorg_noframe (insns);

  df_finish_pass (false);
}
5824 /*----------------------------------------------------------------------*/
5825 /* Builtins */
5826 /*----------------------------------------------------------------------*/
/* Element X gives the index into cgen_insns[] of the most general
   implementation of intrinsic X.  Unimplemented intrinsics are
   mapped to -1.  */
int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

/* Element X gives the index of another instruction that is mapped to
   the same intrinsic as cgen_insns[X].  It is -1 when there is no other
   instruction.

   Things are set up so that mep_intrinsic_chain[X] < X.  */
static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

/* The bitmask for the current ISA.  The ISA masks are declared
   in mep-intrin.h.  */
unsigned int mep_selected_isa;

/* Mapping from -mconfig= name to its ISA bitmask.  */
struct mep_config {
  const char *config_name;
  unsigned int isa;
};

/* Table of known configurations, terminated by a null entry.  */
static struct mep_config mep_configs[] = {
#ifdef COPROC_SELECTION_TABLE
  COPROC_SELECTION_TABLE,
#endif
  { 0, 0 }
};
5856 /* Initialize the global intrinsics variables above. */
static void
mep_init_intrinsics (void)
{
  size_t i;

  /* Set MEP_SELECTED_ISA to the ISA flag for this configuration.  */
  mep_selected_isa = mep_configs[0].isa;
  if (mep_config_string != 0)
    for (i = 0; mep_configs[i].config_name; i++)
      if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
	{
	  mep_selected_isa = mep_configs[i].isa;
	  break;
	}

  /* Assume all intrinsics are unavailable.  */
  for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
    mep_intrinsic_insn[i] = -1;

  /* Build up the global intrinsic tables.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].isas & mep_selected_isa) != 0)
      {
	mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
	mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
      }

  /* See whether we can directly move values between one coprocessor
     register and another.  */
  for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
    if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
      mep_have_copro_copro_moves_p = true;

  /* See whether we can directly move values between core and
     coprocessor registers.  */
  mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
				 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));

  /* NOTE(review): this unconditionally overrides the value computed
     just above, making the cmov1/cmov2 check dead — looks like a
     leftover debugging/workaround assignment; confirm intent.  */
  mep_have_core_copro_moves_p = 1;
}
5898 /* Declare all available intrinsic functions. Called once only. */
/* Type nodes for the coprocessor builtin interface, created once in
   mep_init_builtins.  */
static tree cp_data_bus_int_type_node;
static tree opaque_vector_type_node;
static tree v8qi_type_node;
static tree v4hi_type_node;
static tree v2si_type_node;
static tree v8uqi_type_node;
static tree v4uhi_type_node;
static tree v2usi_type_node;
/* Map a cgen register-operand type onto the tree type node used to
   declare the corresponding builtin; unknown types map to void.  */

static tree
mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
{
  switch (cr)
    {
    case cgen_regnum_operand_type_POINTER:	return ptr_type_node;
    case cgen_regnum_operand_type_LONG:		return long_integer_type_node;
    case cgen_regnum_operand_type_ULONG:	return long_unsigned_type_node;
    case cgen_regnum_operand_type_SHORT:	return short_integer_type_node;
    case cgen_regnum_operand_type_USHORT:	return short_unsigned_type_node;
    case cgen_regnum_operand_type_CHAR:		return char_type_node;
    case cgen_regnum_operand_type_UCHAR:	return unsigned_char_type_node;
    case cgen_regnum_operand_type_SI:		return intSI_type_node;
    case cgen_regnum_operand_type_DI:		return intDI_type_node;
    case cgen_regnum_operand_type_VECTOR:	return opaque_vector_type_node;
    case cgen_regnum_operand_type_V8QI:		return v8qi_type_node;
    case cgen_regnum_operand_type_V4HI:		return v4hi_type_node;
    case cgen_regnum_operand_type_V2SI:		return v2si_type_node;
    case cgen_regnum_operand_type_V8UQI:	return v8uqi_type_node;
    case cgen_regnum_operand_type_V4UHI:	return v4uhi_type_node;
    case cgen_regnum_operand_type_V2USI:	return v2usi_type_node;
    case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
    default:
      return void_type_node;
    }
}
/* Implement TARGET_INIT_BUILTINS: register the coprocessor types and
   one builtin function per available UNSPEC-style intrinsic.  */

static void
mep_init_builtins (void)
{
  size_t i;

  if (TARGET_64BIT_CR_REGS)
    cp_data_bus_int_type_node = long_long_integer_type_node;
  else
    cp_data_bus_int_type_node = long_integer_type_node;

  opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
  v8qi_type_node = build_vector_type (intQI_type_node, 8);
  v4hi_type_node = build_vector_type (intHI_type_node, 4);
  v2si_type_node = build_vector_type (intSI_type_node, 2);
  v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
  v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
  v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);

  add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node);

  add_builtin_type ("cp_vector", opaque_vector_type_node);

  add_builtin_type ("cp_v8qi", v8qi_type_node);
  add_builtin_type ("cp_v4hi", v4hi_type_node);
  add_builtin_type ("cp_v2si", v2si_type_node);

  add_builtin_type ("cp_v8uqi", v8uqi_type_node);
  add_builtin_type ("cp_v4uhi", v4uhi_type_node);
  add_builtin_type ("cp_v2usi", v2usi_type_node);

  /* Intrinsics like mep_cadd3 are implemented with two groups of
     instructions, one which uses UNSPECs and one which uses a specific
     rtl code such as PLUS.  Instructions in the latter group belong
     to GROUP_KNOWN_CODE.

     In such cases, the intrinsic will have two entries in the global
     tables above.  The unspec form is accessed using builtin functions
     while the specific form is accessed using the mep_* enum in
     mep-intrin.h.

     The idea is that __cop arithmetic and builtin functions have
     different optimization requirements.  If mep_cadd3() appears in
     the source code, the user will surely expect gcc to use cadd3
     rather than a work-alike such as add3.  However, if the user
     just writes "a + b", where a or b are __cop variables, it is
     reasonable for gcc to choose a core instruction rather than
     cadd3 if it believes that is more optimal.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
	&& mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
      {
	tree ret_type = void_type_node;
	tree bi_type;

	/* Skip duplicate entries for the same intrinsic.  */
	if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
	  continue;

	if (cgen_insns[i].cret_p)
	  ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);

	bi_type = build_function_type_list (ret_type, NULL_TREE);
	add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
			      bi_type,
			      cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
      }
}
/* Report the unavailability of the given intrinsic.  Each distinct
   intrinsic is reported at most once per translation unit, so a
   heavily-used unavailable intrinsic does not flood the user with
   duplicate diagnostics.  */

#if 1
static void
mep_intrinsic_unavailable (int intrinsic)
{
  /* One flag per intrinsic; zero-initialized static storage means
     "not yet reported".  */
  static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];

  if (already_reported_p[intrinsic])
    return;

  /* No implementation at all vs. wrong (VLIW/non-VLIW) context.  */
  if (mep_intrinsic_insn[intrinsic] < 0)
    error ("coprocessor intrinsic %qs is not available in this configuration",
	   cgen_intrinsics[intrinsic]);
  else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
    error ("%qs is not available in VLIW functions",
	   cgen_intrinsics[intrinsic]);
  else
    error ("%qs is not available in non-VLIW functions",
	   cgen_intrinsics[intrinsic]);

  already_reported_p[intrinsic] = 1;
}
#endif
6029 /* See if any implementation of INTRINSIC is available to the
6030 current function. If so, store the most general implementation
6031 in *INSN_PTR and return true. Return false otherwise. */
6033 static bool
6034 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6036 int i;
6038 i = mep_intrinsic_insn[intrinsic];
6039 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6040 i = mep_intrinsic_chain[i];
6042 if (i >= 0)
6044 *insn_ptr = &cgen_insns[i];
6045 return true;
6047 return false;
6051 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6052 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6053 try using a work-alike instead. In this case, the returned insn
6054 may have three operands rather than two. */
6056 static bool
6057 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6059 size_t i;
6061 if (intrinsic == mep_cmov)
6063 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6064 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6065 return true;
6066 return false;
6068 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6072 /* If ARG is a register operand that is the same size as MODE, convert it
6073 to MODE using a subreg. Otherwise return ARG as-is. */
6075 static rtx
6076 mep_convert_arg (enum machine_mode mode, rtx arg)
6078 if (GET_MODE (arg) != mode
6079 && register_operand (arg, VOIDmode)
6080 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6081 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6082 return arg;
6086 /* Apply regnum conversions to ARG using the description given by REGNUM.
6087 Return the new argument on success and null on failure. */
6089 static rtx
6090 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6092 if (regnum->count == 0)
6093 return arg;
6095 if (GET_CODE (arg) != CONST_INT
6096 || INTVAL (arg) < 0
6097 || INTVAL (arg) >= regnum->count)
6098 return 0;
6100 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
/* Try to make intrinsic argument ARG match the given operand.
   UNSIGNED_P is true if the argument has an unsigned type.
   Return the legitimized rtx, or null if ARG cannot be made to
   satisfy OPERAND's predicate.  */

static rtx
mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
		    int unsigned_p)
{
  if (GET_CODE (arg) == CONST_INT)
    {
      /* CONST_INTs can only be bound to integer operands.  */
      if (GET_MODE_CLASS (operand->mode) != MODE_INT)
	return 0;
    }
  else if (GET_CODE (arg) == CONST_DOUBLE)
    /* These hold vector constants.  */;
  else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
    {
      /* If the argument is a different size from what's expected, we must
	 have a value in the right mode class in order to convert it.  */
      if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
	return 0;

      /* If the operand is an rvalue, promote or demote it to match the
	 operand's size.  This might not need extra instructions when
	 ARG is a register value.  */
      if (operand->constraint[0] != '=')
	arg = convert_to_mode (operand->mode, arg, unsigned_p);
    }

  /* If the operand is an lvalue, bind the operand to a new register.
     The caller will copy this value into ARG after the main
     instruction.  By doing this always, we produce slightly more
     optimal code.  */
  /* But not for control registers.  */
  if (operand->constraint[0] == '='
      && (! REG_P (arg)
	  || ! (CONTROL_REGNO_P (REGNO (arg))
		|| CCR_REGNO_P (REGNO (arg))
		|| CR_REGNO_P (REGNO (arg)))))
    return gen_reg_rtx (operand->mode);

  /* Try simple mode punning.  */
  arg = mep_convert_arg (operand->mode, arg);
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* See if forcing the argument into a register will make it match.  */
  if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
    arg = force_reg (operand->mode, arg);
  else
    arg = mep_convert_arg (operand->mode,
			   force_reg (GET_MODE (arg), arg));
  if (operand->predicate (arg, operand->mode))
    return arg;

  return 0;
}
/* Report that ARG cannot be passed to argument ARGNUM of intrinsic
   function FNNAME.  OPERAND describes the operand to which ARGNUM
   is mapped.  */

static void
mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
		      int argnum, tree fnname)
{
  size_t i;

  /* For constant arguments, look up the operand's predicate in the
     table of immediate predicates so we can report the exact legal
     range or alignment rather than a generic type mismatch.  */
  if (GET_CODE (arg) == CONST_INT)
    for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
      if (operand->predicate == cgen_immediate_predicates[i].predicate)
	{
	  const struct cgen_immediate_predicate *predicate;
	  HOST_WIDE_INT argval;

	  predicate = &cgen_immediate_predicates[i];
	  argval = INTVAL (arg);
	  /* An in-range value that still failed the predicate must
	     have failed its alignment requirement instead.  */
	  if (argval < predicate->lower || argval >= predicate->upper)
	    error ("argument %d of %qE must be in the range %d...%d",
		   argnum, fnname, predicate->lower, predicate->upper - 1);
	  else
	    error ("argument %d of %qE must be a multiple of %d",
		   argnum, fnname, predicate->align);
	  return;
	}

  error ("incompatible type for argument %d of %qE", argnum, fnname);
}
/* Expand a call to an MeP coprocessor builtin.  EXP is the CALL_EXPR;
   TARGET is a suggested location for the result.  Returns the result
   rtx, or NULL_RTX on error (after issuing a diagnostic).

   NOTE(review): TARGET is annotated ATTRIBUTE_UNUSED but is in fact
   read below (result-register reuse and the final copy-out) — the
   annotation is misleading; confirm before relying on it.  */

static rtx
mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
		    rtx subtarget ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED,
		    int ignore ATTRIBUTE_UNUSED)
{
  rtx pat, op[10], arg[10];
  unsigned int a;
  int opindex, unsigned_p[10];
  tree fndecl, args;
  unsigned int n_args;
  tree fnname;
  const struct cgen_insn *cgen_insn;
  const struct insn_data_d *idata;
  unsigned int first_arg = 0;
  unsigned int builtin_n_args;

  fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  fnname = DECL_NAME (fndecl);

  /* Find out which instruction we should emit.  Note that some coprocessor
     intrinsics may only be available in VLIW mode, or only in normal mode.  */
  if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
    {
      mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
      return NULL_RTX;
    }
  idata = &insn_data[cgen_insn->icode];

  builtin_n_args = cgen_insn->num_args;

  /* When the insn produces a value (cret_p), operand 0 is the result,
     not a source argument, so the builtin takes one argument fewer
     (cret_p > 1 apparently means the result is also an input).  */
  if (cgen_insn->cret_p)
    {
      if (cgen_insn->cret_p > 1)
	builtin_n_args ++;
      first_arg = 1;
      mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
      builtin_n_args --;
    }

  /* Evaluate each argument.  */
  n_args = call_expr_nargs (exp);

  if (n_args < builtin_n_args)
    {
      error ("too few arguments to %qE", fnname);
      return NULL_RTX;
    }
  if (n_args > builtin_n_args)
    {
      error ("too many arguments to %qE", fnname);
      return NULL_RTX;
    }

  for (a = first_arg; a < builtin_n_args + first_arg; a++)
    {
      tree value;

      args = CALL_EXPR_ARG (exp, a - first_arg);

      value = args;

#if 0
      if (cgen_insn->regnums[a].reference_p)
	{
	  if (TREE_CODE (value) != ADDR_EXPR)
	    {
	      debug_tree(value);
	      error ("argument %d of %qE must be an address", a+1, fnname);
	      return NULL_RTX;
	    }
	  value = TREE_OPERAND (value, 0);
	}
#endif

      /* If the argument has been promoted to int, get the unpromoted
	 value.  This is necessary when sub-int memory values are bound
	 to reference parameters.  */
      if (TREE_CODE (value) == NOP_EXPR
	  && TREE_TYPE (value) == integer_type_node
	  && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
	      < TYPE_PRECISION (TREE_TYPE (value))))
	value = TREE_OPERAND (value, 0);

      /* If the argument has been promoted to double, get the unpromoted
	 SFmode value.  This is necessary for FMAX support, for example.  */
      if (TREE_CODE (value) == NOP_EXPR
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
	  && TYPE_MODE (TREE_TYPE (value)) == DFmode
	  && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
	value = TREE_OPERAND (value, 0);

      unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
      arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
      /* mep_convert_regnum returns null for an out-of-range constant;
	 that is diagnosed below.  */
      arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
      if (cgen_insn->regnums[a].reference_p)
	{
	  tree pointed_to = TREE_TYPE (TREE_TYPE (value));
	  enum machine_mode pointed_mode = TYPE_MODE (pointed_to);

	  arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
	}
      if (arg[a] == 0)
	{
	  error ("argument %d of %qE must be in the range %d...%d",
		 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
	  return NULL_RTX;
	}
    }

  /* Bind the result operand(s): reuse TARGET when its mode matches,
     otherwise use a fresh pseudo.  */
  for (a = 0; a < first_arg; a++)
    {
      if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
	arg[a] = target;
      else
	arg[a] = gen_reg_rtx (idata->operand[0].mode);
    }

  /* Convert the arguments into a form suitable for the intrinsic.
     Report an error if this isn't possible.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    {
      a = cgen_insn->op_mapping[opindex];
      op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
					arg[a], unsigned_p[a]);
      if (op[opindex] == 0)
	{
	  mep_incompatible_arg (&idata->operand[opindex],
				arg[a], a + 1 - first_arg, fnname);
	  return NULL_RTX;
	}
    }

  /* Emit the instruction.  */
  pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
		       op[5], op[6], op[7], op[8], op[9]);

  /* Conditional branches must be emitted as jump insns.  */
  if (GET_CODE (pat) == SET
      && GET_CODE (SET_DEST (pat)) == PC
      && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
    emit_jump_insn (pat);
  else
    emit_insn (pat);

  /* Copy lvalues back to their final locations.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    if (idata->operand[opindex].constraint[0] == '=')
      {
	a = cgen_insn->op_mapping[opindex];
	if (a >= first_arg)
	  {
	    if (GET_MODE_CLASS (GET_MODE (arg[a]))
		!= GET_MODE_CLASS (GET_MODE (op[opindex])))
	      emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
						   op[opindex]));
	    else
	      {
		/* First convert the operand to the right mode, then copy it
		   into the destination.  Doing the conversion as a separate
		   step (rather than using convert_move) means that we can
		   avoid creating no-op moves when ARG[A] and OP[OPINDEX]
		   refer to the same register.  */
		op[opindex] = convert_to_mode (GET_MODE (arg[a]),
					       op[opindex], unsigned_p[a]);
		if (!rtx_equal_p (arg[a], op[opindex]))
		  emit_move_insn (arg[a], op[opindex]);
	      }
	  }
      }

  /* If the result did not land in TARGET, copy it there.  */
  if (first_arg > 0 && target && target != op[0])
    {
      emit_move_insn (target, op[0]);
    }

  return target;
}
/* Target hook: report whether MODE is supported as a generic vector
   mode.  Always false here — the port's vector types are exposed only
   through the coprocessor builtin types registered above, not as
   middle-end vector modes.  */
static bool
mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return false;
}
6380 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6381 a global register. */
6383 static int
6384 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
6386 int regno;
6387 rtx x = *loc;
6389 if (! x)
6390 return 0;
6392 switch (GET_CODE (x))
6394 case SUBREG:
6395 if (REG_P (SUBREG_REG (x)))
6397 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6398 && global_regs[subreg_regno (x)])
6399 return 1;
6400 return 0;
6402 break;
6404 case REG:
6405 regno = REGNO (x);
6406 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6407 return 1;
6408 return 0;
6410 case SCRATCH:
6411 case PC:
6412 case CC0:
6413 case CONST_INT:
6414 case CONST_DOUBLE:
6415 case CONST:
6416 case LABEL_REF:
6417 return 0;
6419 case CALL:
6420 /* A non-constant call might use a global register. */
6421 return 1;
6423 default:
6424 break;
6427 return 0;
6430 /* Returns nonzero if X mentions a global register. */
6432 static int
6433 global_reg_mentioned_p (rtx x)
6435 if (INSN_P (x))
6437 if (CALL_P (x))
6439 if (! RTL_CONST_OR_PURE_CALL_P (x))
6440 return 1;
6441 x = CALL_INSN_FUNCTION_USAGE (x);
6442 if (x == 0)
6443 return 0;
6445 else
6446 x = PATTERN (x);
6449 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6451 /* Scheduling hooks for VLIW mode.
6453 Conceptually this is very simple: we have a two-pack architecture
6454 that takes one core insn and one coprocessor insn to make up either
6455 a 32- or 64-bit instruction word (depending on the option bit set in
6456 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6457 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6458 and one 48-bit cop insn or two 32-bit core/cop insns.
6460 In practice, instruction selection will be a bear. Consider in
6461 VL64 mode the following insns
6463 add $1, 1
6464 cmov $cr0, $0
6466 these cannot pack, since the add is a 16-bit core insn and cmov
6467 is a 32-bit cop insn. However,
6469 add3 $1, $1, 1
6470 cmov $cr0, $0
6472 packs just fine. For good VLIW code generation in VL64 mode, we
6473 will have to have 32-bit alternatives for many of the common core
6474 insns. Not implemented. */
6476 static int
6477 mep_adjust_cost (rtx_insn *insn, rtx link, rtx_insn *dep_insn, int cost)
6479 int cost_specified;
6481 if (REG_NOTE_KIND (link) != 0)
6483 /* See whether INSN and DEP_INSN are intrinsics that set the same
6484 hard register. If so, it is more important to free up DEP_INSN
6485 than it is to free up INSN.
6487 Note that intrinsics like mep_mulr are handled differently from
6488 the equivalent mep.md patterns. In mep.md, if we don't care
6489 about the value of $lo and $hi, the pattern will just clobber
6490 the registers, not set them. Since clobbers don't count as
6491 output dependencies, it is often possible to reorder two mulrs,
6492 even after reload.
6494 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6495 so any pair of mep_mulr()s will be inter-dependent. We should
6496 therefore give the first mep_mulr() a higher priority. */
6497 if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
6498 && global_reg_mentioned_p (PATTERN (insn))
6499 && global_reg_mentioned_p (PATTERN (dep_insn)))
6500 return 1;
6502 /* If the dependence is an anti or output dependence, assume it
6503 has no cost. */
6504 return 0;
6507 /* If we can't recognize the insns, we can't really do anything. */
6508 if (recog_memoized (dep_insn) < 0)
6509 return cost;
6511 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6512 attribute instead. */
6513 if (!TARGET_H1)
6515 cost_specified = get_attr_latency (dep_insn);
6516 if (cost_specified != 0)
6517 return cost_specified;
6520 return cost;
6523 /* ??? We don't properly compute the length of a load/store insn,
6524 taking into account the addressing mode. */
6526 static int
6527 mep_issue_rate (void)
6529 return TARGET_IVC2 ? 3 : 2;
6532 /* Return true if function DECL was declared with the vliw attribute. */
6534 bool
6535 mep_vliw_function_p (tree decl)
6537 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6540 static rtx_insn *
6541 mep_find_ready_insn (rtx_insn **ready, int nready, enum attr_slot slot,
6542 int length)
6544 int i;
6546 for (i = nready - 1; i >= 0; --i)
6548 rtx_insn *insn = ready[i];
6549 if (recog_memoized (insn) >= 0
6550 && get_attr_slot (insn) == slot
6551 && get_attr_length (insn) == length)
6552 return insn;
6555 return NULL;
6558 static void
6559 mep_move_ready_insn (rtx_insn **ready, int nready, rtx_insn *insn)
6561 int i;
6563 for (i = 0; i < nready; ++i)
6564 if (ready[i] == insn)
6566 for (; i < nready - 1; ++i)
6567 ready[i] = ready[i + 1];
6568 ready[i] = insn;
6569 return;
6572 gcc_unreachable ();
/* Dump a one-line description of INSN — insn code, uid, pattern name
   and allowed VLIW slots — to DUMP, for scheduler debugging.  */

static void
mep_print_sched_insn (FILE *dump, rtx_insn *insn)
{
  const char *slots = "none";
  const char *name = NULL;
  int code;
  /* Scratch for printing an unrecognized slots value; SLOTS may point
     into this, so it must live until the fprintf below.  */
  char buf[30];

  /* Only SET/PARALLEL patterns carry a meaningful slots attribute.  */
  if (GET_CODE (PATTERN (insn)) == SET
      || GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      switch (get_attr_slots (insn))
	{
	case SLOTS_CORE: slots = "core"; break;
	case SLOTS_C3: slots = "c3"; break;
	case SLOTS_P0: slots = "p0"; break;
	case SLOTS_P0_P0S: slots = "p0,p0s"; break;
	case SLOTS_P0_P1: slots = "p0,p1"; break;
	case SLOTS_P0S: slots = "p0s"; break;
	case SLOTS_P0S_P1: slots = "p0s,p1"; break;
	case SLOTS_P1: slots = "p1"; break;
	default:
	  /* Unknown attribute value: print it numerically.  */
	  sprintf(buf, "%d", get_attr_slots (insn));
	  slots = buf;
	  break;
	}
    }
  if (GET_CODE (PATTERN (insn)) == USE)
    slots = "use";

  code = INSN_CODE (insn);
  if (code >= 0)
    name = get_insn_name (code);
  if (!name)
    name = "{unknown}";

  fprintf (dump,
	   "insn %4d %4d %8s %s\n",
	   code,
	   INSN_UID (insn),
	   name,
	   slots);
}
6619 static int
6620 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6621 int sched_verbose ATTRIBUTE_UNUSED, rtx_insn **ready,
6622 int *pnready, int clock ATTRIBUTE_UNUSED)
6624 int nready = *pnready;
6625 rtx_insn *core_insn, *cop_insn;
6626 int i;
6628 if (dump && sched_verbose > 1)
6630 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6631 for (i=0; i<nready; i++)
6632 mep_print_sched_insn (dump, ready[i]);
6633 fprintf (dump, "\n");
6636 if (!mep_vliw_function_p (cfun->decl))
6637 return 1;
6638 if (nready < 2)
6639 return 1;
6641 /* IVC2 uses a DFA to determine what's ready and what's not. */
6642 if (TARGET_IVC2)
6643 return nready;
6645 /* We can issue either a core or coprocessor instruction.
6646 Look for a matched pair of insns to reorder. If we don't
6647 find any, don't second-guess the scheduler's priorities. */
6649 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6650 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6651 TARGET_OPT_VL64 ? 6 : 2)))
6653 else if (TARGET_OPT_VL64
6654 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6655 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6657 else
6658 /* We didn't find a pair. Issue the single insn at the head
6659 of the ready list. */
6660 return 1;
6662 /* Reorder the two insns first. */
6663 mep_move_ready_insn (ready, nready, core_insn);
6664 mep_move_ready_insn (ready, nready - 1, cop_insn);
6665 return 2;
6668 /* A for_each_rtx callback. Return true if *X is a register that is
6669 set by insn PREV. */
6671 static int
6672 mep_store_find_set (rtx *x, void *prev)
6674 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
/* Like mep_store_data_bypass_p, but takes a pattern as the second
   argument, not the containing insn.  */

static bool
mep_store_data_bypass_1 (rtx_insn *prev, rtx pat)
{
  /* Cope with intrinsics like swcpa.  */
  if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      /* NOTE(review): the elements of a PARALLEL are sub-patterns, not
	 insns, so the as_a <rtx_insn *> cast looks suspect — confirm
	 that mep_store_data_bypass_p only uses them via PATTERN-safe
	 accessors.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	if (mep_store_data_bypass_p (prev,
				     as_a <rtx_insn *> (XVECEXP (pat, 0, i))))
	  return true;

      return false;
    }

  /* Check for some sort of store.  */
  if (GET_CODE (pat) != SET
      || GET_CODE (SET_DEST (pat)) != MEM)
    return false;

  /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
     The first operand to the unspec is the store data and the other operands
     are used to calculate the address.  */
  if (GET_CODE (SET_SRC (pat)) == UNSPEC)
    {
      rtx src;
      int i;

      src = SET_SRC (pat);
      /* Start at 1: operand 0 is the store data, which may depend on
	 PREV without affecting the address.  */
      for (i = 1; i < XVECLEN (src, 0); i++)
	if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
	  return false;

      return true;
    }

  /* Otherwise just check that PREV doesn't modify any register mentioned
     in the memory destination.  */
  return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
}
6722 /* Return true if INSN is a store instruction and if the store address
6723 has no true dependence on PREV. */
6725 bool
6726 mep_store_data_bypass_p (rtx_insn *prev, rtx_insn *insn)
6728 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6731 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6732 is a register other than LO or HI and if PREV sets *X. */
6734 static int
6735 mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6737 return (REG_P (*x)
6738 && REGNO (*x) != LO_REGNO
6739 && REGNO (*x) != HI_REGNO
6740 && reg_set_p (*x, (const_rtx) prev));
6743 /* Return true if, apart from HI/LO, there are no true dependencies
6744 between multiplication instructions PREV and INSN. */
6746 bool
6747 mep_mul_hilo_bypass_p (rtx_insn *prev, rtx_insn *insn)
6749 rtx pat;
6751 pat = PATTERN (insn);
6752 if (GET_CODE (pat) == PARALLEL)
6753 pat = XVECEXP (pat, 0, 0);
6754 return (GET_CODE (pat) == SET
6755 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6758 /* Return true if INSN is an ldc instruction that issues to the
6759 MeP-h1 integer pipeline. This is true for instructions that
6760 read from PSW, LP, SAR, HI and LO. */
6762 bool
6763 mep_ipipe_ldc_p (rtx_insn *insn)
6765 rtx pat, src;
6767 pat = PATTERN (insn);
6769 /* Cope with instrinsics that set both a hard register and its shadow.
6770 The set of the hard register comes first. */
6771 if (GET_CODE (pat) == PARALLEL)
6772 pat = XVECEXP (pat, 0, 0);
6774 if (GET_CODE (pat) == SET)
6776 src = SET_SRC (pat);
6778 /* Cope with intrinsics. The first operand to the unspec is
6779 the source register. */
6780 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6781 src = XVECEXP (src, 0, 0);
6783 if (REG_P (src))
6784 switch (REGNO (src))
6786 case PSW_REGNO:
6787 case LP_REGNO:
6788 case SAR_REGNO:
6789 case HI_REGNO:
6790 case LO_REGNO:
6791 return true;
6794 return false;
/* Create a VLIW bundle from core instruction CORE and coprocessor
   instruction COP.  COP always satisfies INSN_P, but CORE can be
   either a new pattern or an existing instruction.

   Emit the bundle in place of COP and return it.  */

static rtx_insn *
mep_make_bundle (rtx core_insn_or_pat, rtx_insn *cop)
{
  rtx seq;
  rtx_insn *core_insn;
  rtx_insn *insn;

  /* If CORE is an existing instruction, remove it, otherwise put
     the new pattern in an INSN harness.  */
  if (INSN_P (core_insn_or_pat))
    {
      core_insn = as_a <rtx_insn *> (core_insn_or_pat);
      remove_insn (core_insn);
    }
  else
    core_insn = make_insn_raw (core_insn_or_pat);

  /* Generate the bundle sequence and replace COP with it.  */
  seq = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core_insn, cop));
  insn = emit_insn_after (seq, cop);
  remove_insn (cop);

  /* Set up the links of the insns inside the SEQUENCE: the bundle's
     members are chained between the SEQUENCE's neighbors.  */
  SET_PREV_INSN (core_insn) = PREV_INSN (insn);
  SET_NEXT_INSN (core_insn) = cop;
  SET_PREV_INSN (cop) = core_insn;
  SET_NEXT_INSN (cop) = NEXT_INSN (insn);

  /* Set the VLIW flag for the coprocessor instruction.  */
  PUT_MODE (core_insn, VOIDmode);
  PUT_MODE (cop, BImode);

  /* Derive a location for the bundle.  Individual instructions cannot
     have their own location because there can be no assembler labels
     between CORE_INSN and COP.  Prefer CORE_INSN's location when it
     has one.  */
  INSN_LOCATION (insn) = INSN_LOCATION (INSN_LOCATION (core_insn) ? core_insn : cop);
  INSN_LOCATION (core_insn) = 0;
  INSN_LOCATION (cop) = 0;

  return insn;
}
6845 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6847 static void
6848 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6850 rtx * pinsn = (rtx *) data;
6852 if (*pinsn && reg_mentioned_p (x, *pinsn))
6853 *pinsn = NULL_RTX;
6856 /* Return true if anything in insn X is (anti,output,true) dependent on
6857 anything in insn Y. */
6859 static int
6860 mep_insn_dependent_p (rtx x, rtx y)
6862 rtx tmp;
6864 gcc_assert (INSN_P (x));
6865 gcc_assert (INSN_P (y));
6867 tmp = PATTERN (y);
6868 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6869 if (tmp == NULL_RTX)
6870 return 1;
6872 tmp = PATTERN (x);
6873 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6874 if (tmp == NULL_RTX)
6875 return 1;
6877 return 0;
6880 static int
6881 core_insn_p (rtx_insn *insn)
6883 if (GET_CODE (PATTERN (insn)) == USE)
6884 return 0;
6885 if (get_attr_slot (insn) == SLOT_CORE)
6886 return 1;
6887 return 0;
/* Mark coprocessor instructions that can be bundled together with
   the immediately preceding core instruction.  This is later used
   to emit the "+" that tells the assembler to create a VLIW insn.

   For unbundled insns, the assembler will automatically add coprocessor
   nops, and 16-bit core nops.  Due to an apparent oversight in the
   spec, the assembler will _not_ automatically add 32-bit core nops,
   so we have to emit those here.

   Called from mep_insn_reorg.  */

static void
mep_bundle_insns (rtx_insn *insns)
{
  rtx_insn *insn, *last = NULL, *first = NULL;
  int saw_scheduling = 0;

  /* Only do bundling if we're in vliw mode.  */
  if (!mep_vliw_function_p (cfun->decl))
    return;

  /* The first insn in a bundle are TImode, the remainder are
     VOIDmode.  After this function, the first has VOIDmode and the
     rest have BImode.  */

  /* Note: this doesn't appear to be true for JUMP_INSNs.  */

  /* First, move any NOTEs that are within a bundle, to the beginning
     of the bundle.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn) && first)
	/* Don't clear FIRST.  */;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
	first = insn;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
	{
	  rtx_insn *note, *prev;

	  /* INSN is part of a bundle; FIRST is the first insn in that
	     bundle.  Move all intervening notes out of the bundle.
	     In addition, since the debug pass may insert a label
	     whenever the current line changes, set the location info
	     for INSN to match FIRST.  */

	  INSN_LOCATION (insn) = INSN_LOCATION (first);

	  /* Walk backwards from INSN to FIRST, splicing each NOTE out
	     of its position and re-linking it just before FIRST.  */
	  note = PREV_INSN (insn);
	  while (note && note != first)
	    {
	      prev = PREV_INSN (note);

	      if (NOTE_P (note))
		{
		  /* Remove NOTE from here... */
		  SET_PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
		  SET_NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
		  /* ...and put it in here.  */
		  SET_NEXT_INSN (note) = first;
		  SET_PREV_INSN (note) = PREV_INSN (first);
		  SET_NEXT_INSN (PREV_INSN (note)) = note;
		  SET_PREV_INSN (NEXT_INSN (note)) = note;
		}

	      note = prev;
	    }
	}

      else if (!NONJUMP_INSN_P (insn))
	first = 0;
    }

  /* Now fix up the bundles.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	continue;

      if (!NONJUMP_INSN_P (insn))
	{
	  last = 0;
	  continue;
	}

      /* If we're not optimizing enough, there won't be scheduling
	 info.  We detect that here.  */
      if (GET_MODE (insn) == TImode)
	saw_scheduling = 1;
      if (!saw_scheduling)
	continue;

      if (TARGET_IVC2)
	{
	  rtx_insn *core_insn = NULL;

	  /* IVC2 slots are scheduled by DFA, so we just accept
	     whatever the scheduler gives us.  However, we must make
	     sure the core insn (if any) is the first in the bundle.
	     The IVC2 assembler can insert whatever NOPs are needed,
	     and allows a COP insn to be first.  */

	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_MODE (insn) == TImode)
	    {
	      /* Scan the bundle (the run of VOIDmode non-jump insns
		 following INSN) for its last core insn.  */
	      for (last = insn;
		   NEXT_INSN (last)
		     && GET_MODE (NEXT_INSN (last)) == VOIDmode
		     && NONJUMP_INSN_P (NEXT_INSN (last));
		   last = NEXT_INSN (last))
		{
		  if (core_insn_p (last))
		    core_insn = last;
		}
	      /* The loop above does not test the final member.  */
	      if (core_insn_p (last))
		core_insn = last;

	      if (core_insn && core_insn != insn)
		{
		  /* Swap core insn to first in the bundle.  */

		  /* Remove core insn.  */
		  if (PREV_INSN (core_insn))
		    SET_NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
		  if (NEXT_INSN (core_insn))
		    SET_PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);

		  /* Re-insert core insn.  */
		  SET_PREV_INSN (core_insn) = PREV_INSN (insn);
		  SET_NEXT_INSN (core_insn) = insn;

		  if (PREV_INSN (core_insn))
		    SET_NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
		  SET_PREV_INSN (insn) = core_insn;

		  PUT_MODE (core_insn, TImode);
		  PUT_MODE (insn, VOIDmode);
		}
	    }

	  /* The first insn has TImode, the rest have VOIDmode */
	  if (GET_MODE (insn) == TImode)
	    PUT_MODE (insn, VOIDmode);
	  else
	    PUT_MODE (insn, BImode);
	  continue;
	}

      PUT_MODE (insn, VOIDmode);
      if (recog_memoized (insn) >= 0
	  && get_attr_slot (insn) == SLOT_COP)
	{
	  /* A COP insn bundles with the preceding core insn only when
	     their combined length exactly fills the VLIW word and
	     there is no dependence between them; otherwise pad with a
	     core nop of the appropriate size.  */
	  if (JUMP_P (insn)
	      || ! last
	      || recog_memoized (last) < 0
	      || get_attr_slot (last) != SLOT_CORE
	      || (get_attr_length (insn)
		  != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
	      || mep_insn_dependent_p (insn, last))
	    {
	      switch (get_attr_length (insn))
		{
		case 8:
		  break;
		case 6:
		  insn = mep_make_bundle (gen_nop (), insn);
		  break;
		case 4:
		  if (TARGET_OPT_VL64)
		    insn = mep_make_bundle (gen_nop32 (), insn);
		  break;
		case 2:
		  if (TARGET_OPT_VL64)
		    error ("2 byte cop instructions are"
			   " not allowed in 64-bit VLIW mode");
		  else
		    insn = mep_make_bundle (gen_nop (), insn);
		  break;
		default:
		  error ("unexpected %d byte cop instruction",
			 get_attr_length (insn));
		  break;
		}
	    }
	  else
	    insn = mep_make_bundle (last, insn);
	}

      last = insn;
    }
}
/* Try to instantiate INTRINSIC with the operands given in OPERANDS.
   Return true on success.  This function can fail if the intrinsic
   is unavailable or if the operands don't satisfy their predicates.  */

bool
mep_emit_intrinsic (int intrinsic, const rtx *operands)
{
  const struct cgen_insn *cgen_insn;
  const struct insn_data_d *idata;
  rtx newop[10];
  int i;

  if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
    return false;

  idata = &insn_data[cgen_insn->icode];
  /* Legitimize each operand and verify it against the insn's
     predicate; bail out without emitting anything on mismatch.  */
  for (i = 0; i < idata->n_operands; i++)
    {
      newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
      if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
	return false;
    }

  /* NOTE(review): only 9 operands are passed here, while
     mep_expand_builtin passes 10 — presumably no directly-emitted
     intrinsic has a 10th operand; confirm.  */
  emit_insn (idata->genfun (newop[0], newop[1], newop[2],
			    newop[3], newop[4], newop[5],
			    newop[6], newop[7], newop[8]));

  return true;
}
/* Apply the given unary intrinsic to OPERANDS[1] and store it on
   OPERANDS[0].  Report an error if the instruction could not
   be synthesized.  OPERANDS[1] is a register_operand.  For sign
   and zero extensions, it may be smaller than SImode.

   Currently a stub: always returns false (no expansion performed),
   leaving callers to fall back on other strategies.  */

bool
mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
			    rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
/* Likewise, but apply a binary operation to OPERANDS[1] and
   OPERANDS[2].  OPERANDS[1] is a register_operand, OPERANDS[2]
   can be a general_operand.

   IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
   third operand.  REG and REG3 take register operands only.

   Currently a stub: always returns false (no expansion performed).  */

bool
mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
			     int ATTRIBUTE_UNUSED immediate3,
			     int ATTRIBUTE_UNUSED reg,
			     int ATTRIBUTE_UNUSED reg3,
			     rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
/* Target hook: compute the cost of rtx X (whose code is CODE) into
   *TOTAL.  Return true when the cost has been determined here, false
   to let the middle end use its default costing.  */

static bool
mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
	      int opno ATTRIBUTE_UNUSED, int *total,
	      bool ATTRIBUTE_UNUSED speed_t)
{
  switch (code)
    {
    case CONST_INT:
      /* NOTE(review): the asymmetric upper bound (`< 127`, excluding
	 127 itself) looks like a possible off-by-one against a signed
	 8-bit immediate range of -128..127 — confirm against the
	 instruction set before changing.  */
      if (INTVAL (x) >= -128 && INTVAL (x) < 127)
	*total = 0;
      else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
	*total = 1;
      else
	*total = 3;
      return true;

    case SYMBOL_REF:
      /* Symbol references are free when optimizing for size.  */
      *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
      return true;

    case MULT:
      /* Multiply by a constant costs slightly more than by a
	 register.  */
      *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
		? COSTS_N_INSNS (3)
		: COSTS_N_INSNS (2));
      return true;
    }
  /* Everything else: defer to the generic cost model.  */
  return false;
}
7175 static int
7176 mep_address_cost (rtx addr ATTRIBUTE_UNUSED,
7177 enum machine_mode mode ATTRIBUTE_UNUSED,
7178 addr_space_t as ATTRIBUTE_UNUSED,
7179 bool ATTRIBUTE_UNUSED speed_p)
7181 return 1;
/* Implement TARGET_ASM_INIT_SECTIONS.  Create the MeP-specific output
   sections as unnamed sections, each emitted via its literal .section
   directive.  */

static void
mep_asm_init_sections (void)
{
  /* Writable "based" data ("aw" flags from SECTION_WRITE).  */
  based_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .based,\"aw\"");

  /* Small BSS: writable and zero-initialized (SECTION_BSS).  */
  tinybss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .sbss,\"aw\"");

  /* Small initialized data; explicitly marked @progbits.  */
  sdata_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .sdata,\"aw\",@progbits");

  /* "Far" writable data and its BSS counterpart.  */
  far_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .far,\"aw\"");

  farbss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .farbss,\"aw\"");

  /* Read-only data sections (no SECTION_WRITE; "a" flag only).  */
  frodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .frodata,\"a\"");

  srodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .srodata,\"a\"");

  /* VLIW code sections: SECTION_MEP_VLIW, "v" section flag, and a
     .vliw directive so the assembler switches mode.  */
  vtext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vtext,\"axv\"\n\t.vliw");

  vftext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vftext,\"axv\"\n\t.vliw");

  /* Core-mode "far" text section; the .core directive switches the
     assembler back out of VLIW mode.  */
  ftext_section
    = get_unnamed_section (SECTION_CODE, output_section_asm_op,
			   "\t.section .ftext,\"ax\"\n\t.core");
}
/* Initialize the GCC target structure.  Each hook below overrides the
   default in TARGET_INITIALIZER with its MeP implementation.  */

/* Function output and attribute handling.  */
#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE mep_attribute_table
#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
#undef TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P mep_can_inline_p

/* Section selection and naming.  */
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION mep_asm_named_section

/* Built-in functions.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS mep_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN mep_expand_builtin

/* Instruction scheduling.  */
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST mep_adjust_cost
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE mep_issue_rate
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER mep_sched_reorder

/* Symbol encoding and section placement.  */
#undef TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION mep_select_section
#undef TARGET_ASM_UNIQUE_SECTION
#define TARGET_ASM_UNIQUE_SECTION mep_unique_section
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO mep_encode_section_info

/* Calls and costs.  */
#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS mep_rtx_cost
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST mep_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG mep_reorg

/* Argument passing and varargs.  */
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG mep_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE mep_function_arg_advance

/* Miscellaneous target properties.  */
#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE mep_option_override
#undef TARGET_ALLOCATE_INITIAL_VALUE
#define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY mep_return_in_memory
#undef TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield

/* Varargs built-ins (__builtin_saveregs / va_list machinery).  */
#undef TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr

/* Frame elimination, trampolines, constants, loops.  */
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE mep_can_eliminate
#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE mep_conditional_register_usage
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT mep_trampoline_init
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P mep_legitimate_constant_p
#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost

/* The single instance of the target hooks structure for this back
   end.  */
struct gcc_target targetm = TARGET_INITIALIZER;
7314 #include "gt-mep.h"