Merge from mainline (163495:164578).
[official-gcc/graphite-test-results.git] / gcc / config / mep / mep.c
blob712abbe5f2aa0bd4a0a6b82e67631c658c4ea1d5
1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "output.h"
34 #include "insn-attr.h"
35 #include "flags.h"
36 #include "recog.h"
37 #include "obstack.h"
38 #include "tree.h"
39 #include "expr.h"
40 #include "except.h"
41 #include "function.h"
42 #include "optabs.h"
43 #include "reload.h"
44 #include "tm_p.h"
45 #include "ggc.h"
46 #include "diagnostic-core.h"
47 #include "toplev.h"
48 #include "integrate.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
52 #include "df.h"
53 #include "gimple.h"
55 /* Structure of this file:
57 + Command Line Option Support
58 + Pattern support - constraints, predicates, expanders
59 + Reload Support
60 + Costs
61 + Functions to save and restore machine-specific function data.
62 + Frame/Epilog/Prolog Related
63 + Operand Printing
64 + Function args in registers
65 + Handle pipeline hazards
66 + Handle attributes
67 + Trampolines
68 + Machine-dependent Reorg
69 + Builtins. */
71 /* Symbol encodings:
73 Symbols are encoded as @ <char> . <name> where <char> is one of these:
75 b - based
76 t - tiny
77 n - near
78 f - far
79 i - io, near
80 I - io, far
81 c - cb (control bus) */
/* Per-function machine-specific data for the MeP backend, reached
   through cfun->machine (allocated by mep_init_machine_status).  */
83 struct GTY(()) machine_function
/* Nonzero if this function needs a frame pointer.  */
85 int mep_frame_pointer_needed;
87 /* For varargs. */
88 int arg_regs_to_save;
89 int regsave_filler;
90 int frame_filler;
91 int frame_locked;
93 /* Records __builtin_return address. */
94 rtx eh_stack_adjust;
/* Per-register save-slot bookkeeping; indexed by hard register number.  */
96 int reg_save_size;
97 int reg_save_slot[FIRST_PSEUDO_REGISTER];
98 unsigned char reg_saved[FIRST_PSEUDO_REGISTER];
100 /* 2 if the current function has an interrupt attribute, 1 if not, 0
101 if unknown. This is here because resource.c uses EPILOGUE_USES
102 which needs it. */
103 int interrupt_handler;
105 /* Likewise, for disinterrupt attribute. */
106 int disable_interrupts;
108 /* Number of doloop tags used so far. */
109 int doloop_tags;
111 /* True if the last tag was allocated to a doloop_end. */
112 bool doloop_tag_from_end;
114 /* True if reload changes $TP. */
115 bool reload_changes_tp;
117 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
118 We only set this if the function is an interrupt handler. */
119 int asms_without_operands;
122 #define MEP_CONTROL_REG(x) \
123 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
125 static GTY(()) section * based_section;
126 static GTY(()) section * tinybss_section;
127 static GTY(()) section * far_section;
128 static GTY(()) section * farbss_section;
129 static GTY(()) section * frodata_section;
130 static GTY(()) section * srodata_section;
132 static GTY(()) section * vtext_section;
133 static GTY(()) section * vftext_section;
134 static GTY(()) section * ftext_section;
136 static void mep_set_leaf_registers (int);
137 static bool symbol_p (rtx);
138 static bool symbolref_p (rtx);
139 static void encode_pattern_1 (rtx);
140 static void encode_pattern (rtx);
141 static bool const_in_range (rtx, int, int);
142 static void mep_rewrite_mult (rtx, rtx);
143 static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
144 static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
145 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
146 static bool move_needs_splitting (rtx, rtx, enum machine_mode);
147 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
148 static bool mep_nongeneral_reg (rtx);
149 static bool mep_general_copro_reg (rtx);
150 static bool mep_nonregister (rtx);
151 static struct machine_function* mep_init_machine_status (void);
152 static rtx mep_tp_rtx (void);
153 static rtx mep_gp_rtx (void);
154 static bool mep_interrupt_p (void);
155 static bool mep_disinterrupt_p (void);
156 static bool mep_reg_set_p (rtx, rtx);
157 static bool mep_reg_set_in_function (int);
158 static bool mep_interrupt_saved_reg (int);
159 static bool mep_call_saves_register (int);
160 static rtx F (rtx);
161 static void add_constant (int, int, int, int);
162 static rtx maybe_dead_move (rtx, rtx, bool);
163 static void mep_reload_pointer (int, const char *);
164 static void mep_start_function (FILE *, HOST_WIDE_INT);
165 static bool mep_function_ok_for_sibcall (tree, tree);
166 static int unique_bit_in (HOST_WIDE_INT);
167 static int bit_size_for_clip (HOST_WIDE_INT);
168 static int bytesize (const_tree, enum machine_mode);
169 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
170 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
171 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
172 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
173 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
174 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
175 static bool mep_function_attribute_inlinable_p (const_tree);
176 static bool mep_can_inline_p (tree, tree);
177 static bool mep_lookup_pragma_disinterrupt (const char *);
178 static int mep_multiple_address_regions (tree, bool);
179 static int mep_attrlist_to_encoding (tree, tree);
180 static void mep_insert_attributes (tree, tree *);
181 static void mep_encode_section_info (tree, rtx, int);
182 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
183 static void mep_unique_section (tree, int);
184 static unsigned int mep_section_type_flags (tree, const char *, int);
185 static void mep_asm_named_section (const char *, unsigned int, tree);
186 static bool mep_mentioned_p (rtx, rtx, int);
187 static void mep_reorg_regmove (rtx);
188 static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
189 static void mep_reorg_repeat (rtx);
190 static bool mep_invertable_branch_p (rtx);
191 static void mep_invert_branch (rtx, rtx);
192 static void mep_reorg_erepeat (rtx);
193 static void mep_jmp_return_reorg (rtx);
194 static void mep_reorg_addcombine (rtx);
195 static void mep_reorg (void);
196 static void mep_init_intrinsics (void);
197 static void mep_init_builtins (void);
198 static void mep_intrinsic_unavailable (int);
199 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
200 static bool mep_get_move_insn (int, const struct cgen_insn **);
201 static rtx mep_convert_arg (enum machine_mode, rtx);
202 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
203 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
204 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
205 static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
206 static int mep_adjust_cost (rtx, rtx, rtx, int);
207 static int mep_issue_rate (void);
208 static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
209 static void mep_move_ready_insn (rtx *, int, rtx);
210 static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
211 static rtx mep_make_bundle (rtx, rtx);
212 static void mep_bundle_insns (rtx);
213 static bool mep_rtx_cost (rtx, int, int, int *, bool);
214 static int mep_address_cost (rtx, bool);
215 static void mep_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
216 tree, int *, int);
217 static bool mep_pass_by_reference (CUMULATIVE_ARGS * cum, enum machine_mode,
218 const_tree, bool);
219 static bool mep_vector_mode_supported_p (enum machine_mode);
220 static bool mep_handle_option (size_t, const char *, int);
221 static rtx mep_allocate_initial_value (rtx);
222 static void mep_asm_init_sections (void);
223 static int mep_comp_type_attributes (const_tree, const_tree);
224 static bool mep_narrow_volatile_bitfield (void);
225 static rtx mep_expand_builtin_saveregs (void);
226 static tree mep_build_builtin_va_list (void);
227 static void mep_expand_va_start (tree, rtx);
228 static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
229 static bool mep_can_eliminate (const int, const int);
230 static void mep_trampoline_init (rtx, tree, rtx);
232 #define WANT_GCC_DEFINITIONS
233 #include "mep-intrin.h"
234 #undef WANT_GCC_DEFINITIONS
237 /* Command Line Option Support. */
239 char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
241 /* True if we can use cmov instructions to move values back and forth
242 between core and coprocessor registers. */
243 bool mep_have_core_copro_moves_p;
245 /* True if we can use cmov instructions (or a work-alike) to move
246 values between coprocessor registers. */
247 bool mep_have_copro_copro_moves_p;
249 /* A table of all coprocessor instructions that can act like
250 a coprocessor-to-coprocessor cmov. */
251 static const int mep_cmov_insns[] = {
252 mep_cmov,
253 mep_cpmov,
254 mep_fmovs,
255 mep_caddi3,
256 mep_csubi3,
257 mep_candi3,
258 mep_cori3,
259 mep_cxori3,
260 mep_cand3,
261 mep_cor3
264 static int option_mtiny_specified = 0;
267 static void
268 mep_set_leaf_registers (int enable)
270 int i;
272 if (mep_leaf_registers[0] != enable)
273 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
274 mep_leaf_registers[i] = enable;
277 void
278 mep_conditional_register_usage (void)
280 int i;
282 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
284 fixed_regs[HI_REGNO] = 1;
285 fixed_regs[LO_REGNO] = 1;
286 call_used_regs[HI_REGNO] = 1;
287 call_used_regs[LO_REGNO] = 1;
290 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
291 global_regs[i] = 1;
294 static void
295 mep_option_optimization (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
297 /* The first scheduling pass often increases register pressure and tends
298 to result in more spill code. Only run it when specifically asked. */
299 flag_schedule_insns = 0;
301 /* Using $fp doesn't gain us much, even when debugging is important. */
302 flag_omit_frame_pointer = 1;
305 static void
306 mep_option_override (void)
308 if (flag_pic == 1)
309 warning (OPT_fpic, "-fpic is not supported");
310 if (flag_pic == 2)
311 warning (OPT_fPIC, "-fPIC is not supported");
312 if (TARGET_S && TARGET_M)
313 error ("only one of -ms and -mm may be given");
314 if (TARGET_S && TARGET_L)
315 error ("only one of -ms and -ml may be given");
316 if (TARGET_M && TARGET_L)
317 error ("only one of -mm and -ml may be given");
318 if (TARGET_S && option_mtiny_specified)
319 error ("only one of -ms and -mtiny= may be given");
320 if (TARGET_M && option_mtiny_specified)
321 error ("only one of -mm and -mtiny= may be given");
322 if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
323 warning (0, "-mclip currently has no effect without -mminmax");
325 if (mep_const_section)
327 if (strcmp (mep_const_section, "tiny") != 0
328 && strcmp (mep_const_section, "near") != 0
329 && strcmp (mep_const_section, "far") != 0)
330 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
333 if (TARGET_S)
334 mep_tiny_cutoff = 65536;
335 if (TARGET_M)
336 mep_tiny_cutoff = 0;
337 if (TARGET_L && ! option_mtiny_specified)
338 mep_tiny_cutoff = 0;
340 if (TARGET_64BIT_CR_REGS)
341 flag_split_wide_types = 0;
343 init_machine_status = mep_init_machine_status;
344 mep_init_intrinsics ();
347 /* Pattern Support - constraints, predicates, expanders. */
349 /* MEP has very few instructions that can refer to the span of
350 addresses used by symbols, so it's common to check for them. */
352 static bool
353 symbol_p (rtx x)
355 int c = GET_CODE (x);
357 return (c == CONST_INT
358 || c == CONST
359 || c == SYMBOL_REF);
362 static bool
363 symbolref_p (rtx x)
365 int c;
367 if (GET_CODE (x) != MEM)
368 return false;
370 c = GET_CODE (XEXP (x, 0));
371 return (c == CONST_INT
372 || c == CONST
373 || c == SYMBOL_REF);
376 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
378 #define GEN_REG(R, STRICT) \
379 (GR_REGNO_P (R) \
380 || (!STRICT \
381 && ((R) == ARG_POINTER_REGNUM \
382 || (R) >= FIRST_PSEUDO_REGISTER)))
384 static char pattern[12], *patternp;
385 static GTY(()) rtx patternr[12];
386 #define RTX_IS(x) (strcmp (pattern, x) == 0)
388 static void
389 encode_pattern_1 (rtx x)
391 int i;
393 if (patternp == pattern + sizeof (pattern) - 2)
395 patternp[-1] = '?';
396 return;
399 patternr[patternp-pattern] = x;
401 switch (GET_CODE (x))
403 case REG:
404 *patternp++ = 'r';
405 break;
406 case MEM:
407 *patternp++ = 'm';
408 case CONST:
409 encode_pattern_1 (XEXP(x, 0));
410 break;
411 case PLUS:
412 *patternp++ = '+';
413 encode_pattern_1 (XEXP(x, 0));
414 encode_pattern_1 (XEXP(x, 1));
415 break;
416 case LO_SUM:
417 *patternp++ = 'L';
418 encode_pattern_1 (XEXP(x, 0));
419 encode_pattern_1 (XEXP(x, 1));
420 break;
421 case HIGH:
422 *patternp++ = 'H';
423 encode_pattern_1 (XEXP(x, 0));
424 break;
425 case SYMBOL_REF:
426 *patternp++ = 's';
427 break;
428 case LABEL_REF:
429 *patternp++ = 'l';
430 break;
431 case CONST_INT:
432 case CONST_DOUBLE:
433 *patternp++ = 'i';
434 break;
435 case UNSPEC:
436 *patternp++ = 'u';
437 *patternp++ = '0' + XCINT(x, 1, UNSPEC);
438 for (i=0; i<XVECLEN (x, 0); i++)
439 encode_pattern_1 (XVECEXP (x, 0, i));
440 break;
441 case USE:
442 *patternp++ = 'U';
443 break;
444 default:
445 *patternp++ = '?';
446 #if 0
447 fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
448 debug_rtx (x);
449 gcc_unreachable ();
450 #endif
451 break;
455 static void
456 encode_pattern (rtx x)
458 patternp = pattern;
459 encode_pattern_1 (x);
460 *patternp = 0;
464 mep_section_tag (rtx x)
466 const char *name;
468 while (1)
470 switch (GET_CODE (x))
472 case MEM:
473 case CONST:
474 x = XEXP (x, 0);
475 break;
476 case UNSPEC:
477 x = XVECEXP (x, 0, 0);
478 break;
479 case PLUS:
480 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
481 return 0;
482 x = XEXP (x, 0);
483 break;
484 default:
485 goto done;
488 done:
489 if (GET_CODE (x) != SYMBOL_REF)
490 return 0;
491 name = XSTR (x, 0);
492 if (name[0] == '@' && name[2] == '.')
494 if (name[1] == 'i' || name[1] == 'I')
496 if (name[1] == 'I')
497 return 'f'; /* near */
498 return 'n'; /* far */
500 return name[1];
502 return 0;
506 mep_regno_reg_class (int regno)
508 switch (regno)
510 case SP_REGNO: return SP_REGS;
511 case TP_REGNO: return TP_REGS;
512 case GP_REGNO: return GP_REGS;
513 case 0: return R0_REGS;
514 case HI_REGNO: return HI_REGS;
515 case LO_REGNO: return LO_REGS;
516 case ARG_POINTER_REGNUM: return GENERAL_REGS;
519 if (GR_REGNO_P (regno))
520 return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
521 if (CONTROL_REGNO_P (regno))
522 return CONTROL_REGS;
524 if (CR_REGNO_P (regno))
526 int i, j;
528 /* Search for the register amongst user-defined subclasses of
529 the coprocessor registers. */
530 for (i = USER0_REGS; i <= USER3_REGS; ++i)
532 if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
533 continue;
534 for (j = 0; j < N_REG_CLASSES; ++j)
536 enum reg_class sub = reg_class_subclasses[i][j];
538 if (sub == LIM_REG_CLASSES)
539 return i;
540 if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
541 break;
545 return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
548 if (CCR_REGNO_P (regno))
549 return CCR_REGS;
551 gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
552 return NO_REGS;
555 #if 0
557 mep_reg_class_from_constraint (int c, const char *str)
559 switch (c)
561 case 'a':
562 return SP_REGS;
563 case 'b':
564 return TP_REGS;
565 case 'c':
566 return CONTROL_REGS;
567 case 'd':
568 return HILO_REGS;
569 case 'e':
571 switch (str[1])
573 case 'm':
574 return LOADABLE_CR_REGS;
575 case 'x':
576 return mep_have_copro_copro_moves_p ? CR_REGS : NO_REGS;
577 case 'r':
578 return mep_have_core_copro_moves_p ? CR_REGS : NO_REGS;
579 default:
580 return NO_REGS;
583 case 'h':
584 return HI_REGS;
585 case 'j':
586 return RPC_REGS;
587 case 'l':
588 return LO_REGS;
589 case 't':
590 return TPREL_REGS;
591 case 'v':
592 return GP_REGS;
593 case 'x':
594 return CR_REGS;
595 case 'y':
596 return CCR_REGS;
597 case 'z':
598 return R0_REGS;
600 case 'A':
601 case 'B':
602 case 'C':
603 case 'D':
605 enum reg_class which = c - 'A' + USER0_REGS;
606 return (reg_class_size[which] > 0 ? which : NO_REGS);
609 default:
610 return NO_REGS;
614 bool
615 mep_const_ok_for_letter_p (HOST_WIDE_INT value, int c)
617 switch (c)
619 case 'I': return value >= -32768 && value < 32768;
620 case 'J': return value >= 0 && value < 65536;
621 case 'K': return value >= 0 && value < 0x01000000;
622 case 'L': return value >= -32 && value < 32;
623 case 'M': return value >= 0 && value < 32;
624 case 'N': return value >= 0 && value < 16;
625 case 'O':
626 if (value & 0xffff)
627 return false;
628 return value >= -2147483647-1 && value <= 2147483647;
629 default:
630 gcc_unreachable ();
634 bool
635 mep_extra_constraint (rtx value, int c)
637 encode_pattern (value);
639 switch (c)
641 case 'R':
642 /* For near symbols, like what call uses. */
643 if (GET_CODE (value) == REG)
644 return 0;
645 return mep_call_address_operand (value, GET_MODE (value));
647 case 'S':
648 /* For signed 8-bit immediates. */
649 return (GET_CODE (value) == CONST_INT
650 && INTVAL (value) >= -128
651 && INTVAL (value) <= 127);
653 case 'T':
654 /* For tp/gp relative symbol values. */
655 return (RTX_IS ("u3s") || RTX_IS ("u2s")
656 || RTX_IS ("+u3si") || RTX_IS ("+u2si"));
658 case 'U':
659 /* Non-absolute memories. */
660 return GET_CODE (value) == MEM && ! CONSTANT_P (XEXP (value, 0));
662 case 'W':
663 /* %hi(sym) */
664 return RTX_IS ("Hs");
666 case 'Y':
667 /* Register indirect. */
668 return RTX_IS ("mr");
670 case 'Z':
671 return mep_section_tag (value) == 'c' && RTX_IS ("ms");
674 return false;
676 #endif
678 #undef PASS
679 #undef FAIL
681 static bool
682 const_in_range (rtx x, int minv, int maxv)
684 return (GET_CODE (x) == CONST_INT
685 && INTVAL (x) >= minv
686 && INTVAL (x) <= maxv);
689 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
690 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
691 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
692 at the end of the insn stream. */
695 mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
697 if (rtx_equal_p (dest, src1))
698 return src2;
699 else if (rtx_equal_p (dest, src2))
700 return src1;
701 else
703 if (insn == 0)
704 emit_insn (gen_movsi (copy_rtx (dest), src1));
705 else
706 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
707 return src2;
711 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
712 Change the last element of PATTERN from (clobber (scratch:SI))
713 to (clobber (reg:SI HI_REGNO)). */
715 static void
716 mep_rewrite_mult (rtx insn, rtx pattern)
718 rtx hi_clobber;
720 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
721 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
722 PATTERN (insn) = pattern;
723 INSN_CODE (insn) = -1;
726 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
727 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
728 store the result in DEST if nonnull. */
730 static void
731 mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
733 rtx lo, pattern;
735 lo = gen_rtx_REG (SImode, LO_REGNO);
736 if (dest)
737 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
738 mep_mulr_source (insn, dest, src1, src2));
739 else
740 pattern = gen_mulsi3_lo (lo, src1, src2);
741 mep_rewrite_mult (insn, pattern);
744 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
745 SRC3 into $lo, then use either madd or maddr. The move into $lo will
746 be deleted by a peephole2 if SRC3 is already in $lo. */
748 static void
749 mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
751 rtx lo, pattern;
753 lo = gen_rtx_REG (SImode, LO_REGNO);
754 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
755 if (dest)
756 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
757 mep_mulr_source (insn, dest, src1, src2),
758 copy_rtx (lo));
759 else
760 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
761 mep_rewrite_mult (insn, pattern);
764 /* Return true if $lo has the same value as integer register GPR when
765 instruction INSN is reached. If necessary, rewrite the instruction
766 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
767 rtx for (reg:SI LO_REGNO).
769 This function is intended to be used by the peephole2 pass. Since
770 that pass goes from the end of a basic block to the beginning, and
771 propagates liveness information on the way, there is no need to
772 update register notes here.
774 If GPR_DEAD_P is true on entry, and this function returns true,
775 then the caller will replace _every_ use of GPR in and after INSN
776 with LO. This means that if the instruction that sets $lo is a
777 mulr- or maddr-type instruction, we can rewrite it to use mul or
778 madd instead. In combination with the copy progagation pass,
779 this allows us to replace sequences like:
781 mov GPR,R1
782 mulr GPR,R2
784 with:
786 mul R1,R2
788 if GPR is no longer used. */
790 static bool
791 mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
795 insn = PREV_INSN (insn);
796 if (INSN_P (insn))
797 switch (recog_memoized (insn))
799 case CODE_FOR_mulsi3_1:
800 extract_insn (insn);
801 if (rtx_equal_p (recog_data.operand[0], gpr))
803 mep_rewrite_mulsi3 (insn,
804 gpr_dead_p ? NULL : recog_data.operand[0],
805 recog_data.operand[1],
806 recog_data.operand[2]);
807 return true;
809 return false;
811 case CODE_FOR_maddsi3:
812 extract_insn (insn);
813 if (rtx_equal_p (recog_data.operand[0], gpr))
815 mep_rewrite_maddsi3 (insn,
816 gpr_dead_p ? NULL : recog_data.operand[0],
817 recog_data.operand[1],
818 recog_data.operand[2],
819 recog_data.operand[3]);
820 return true;
822 return false;
824 case CODE_FOR_mulsi3r:
825 case CODE_FOR_maddsi3r:
826 extract_insn (insn);
827 return rtx_equal_p (recog_data.operand[1], gpr);
829 default:
830 if (reg_set_p (lo, insn)
831 || reg_set_p (gpr, insn)
832 || volatile_insn_p (PATTERN (insn)))
833 return false;
835 if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
836 gpr_dead_p = false;
837 break;
840 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
841 return false;
844 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
846 bool
847 mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
849 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
850 extract_insn (insn);
851 return result;
854 /* Return true if SET can be turned into a post-modify load or store
855 that adds OFFSET to GPR. In other words, return true if SET can be
856 changed into:
858 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
860 It's OK to change SET to an equivalent operation in order to
861 make it match. */
863 static bool
864 mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
866 rtx *reg, *mem;
867 unsigned int reg_bytes, mem_bytes;
868 enum machine_mode reg_mode, mem_mode;
870 /* Only simple SETs can be converted. */
871 if (GET_CODE (set) != SET)
872 return false;
874 /* Point REG to what we hope will be the register side of the set and
875 MEM to what we hope will be the memory side. */
876 if (GET_CODE (SET_DEST (set)) == MEM)
878 mem = &SET_DEST (set);
879 reg = &SET_SRC (set);
881 else
883 reg = &SET_DEST (set);
884 mem = &SET_SRC (set);
885 if (GET_CODE (*mem) == SIGN_EXTEND)
886 mem = &XEXP (*mem, 0);
889 /* Check that *REG is a suitable coprocessor register. */
890 if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
891 return false;
893 /* Check that *MEM is a suitable memory reference. */
894 if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
895 return false;
897 /* Get the number of bytes in each operand. */
898 mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
899 reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));
901 /* Check that OFFSET is suitably aligned. */
902 if (INTVAL (offset) & (mem_bytes - 1))
903 return false;
905 /* Convert *MEM to a normal integer mode. */
906 mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
907 *mem = change_address (*mem, mem_mode, NULL);
909 /* Adjust *REG as well. */
910 *reg = shallow_copy_rtx (*reg);
911 if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
913 /* SET is a subword load. Convert it to an explicit extension. */
914 PUT_MODE (*reg, SImode);
915 *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
917 else
919 reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
920 PUT_MODE (*reg, reg_mode);
922 return true;
925 /* Return the effect of frame-related instruction INSN. */
927 static rtx
928 mep_frame_expr (rtx insn)
930 rtx note, expr;
932 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
933 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
934 RTX_FRAME_RELATED_P (expr) = 1;
935 return expr;
938 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
939 new pattern in INSN1; INSN2 will be deleted by the caller. */
941 static void
942 mep_make_parallel (rtx insn1, rtx insn2)
944 rtx expr;
946 if (RTX_FRAME_RELATED_P (insn2))
948 expr = mep_frame_expr (insn2);
949 if (RTX_FRAME_RELATED_P (insn1))
950 expr = gen_rtx_SEQUENCE (VOIDmode,
951 gen_rtvec (2, mep_frame_expr (insn1), expr));
952 set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
953 RTX_FRAME_RELATED_P (insn1) = 1;
956 PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
957 gen_rtvec (2, PATTERN (insn1),
958 PATTERN (insn2)));
959 INSN_CODE (insn1) = -1;
962 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
963 the basic block to see if any previous load or store instruction can
964 be persuaded to do SET_INSN as a side-effect. Return true if so. */
966 static bool
967 mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
969 rtx insn;
971 insn = set_insn;
974 insn = PREV_INSN (insn);
975 if (INSN_P (insn))
977 if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
979 mep_make_parallel (insn, set_insn);
980 return true;
983 if (reg_set_p (reg, insn)
984 || reg_referenced_p (reg, PATTERN (insn))
985 || volatile_insn_p (PATTERN (insn)))
986 return false;
989 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
990 return false;
993 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
995 bool
996 mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
998 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
999 extract_insn (insn);
1000 return result;
1003 bool
1004 mep_allow_clip (rtx ux, rtx lx, int s)
1006 HOST_WIDE_INT u = INTVAL (ux);
1007 HOST_WIDE_INT l = INTVAL (lx);
1008 int i;
1010 if (!TARGET_OPT_CLIP)
1011 return false;
1013 if (s)
1015 for (i = 0; i < 30; i ++)
1016 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
1017 && (l == - ((HOST_WIDE_INT) 1 << i)))
1018 return true;
1020 else
1022 if (l != 0)
1023 return false;
1025 for (i = 0; i < 30; i ++)
1026 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
1027 return true;
1029 return false;
1032 bool
1033 mep_bit_position_p (rtx x, bool looking_for)
1035 if (GET_CODE (x) != CONST_INT)
1036 return false;
1037 switch ((int) INTVAL(x) & 0xff)
1039 case 0x01: case 0x02: case 0x04: case 0x08:
1040 case 0x10: case 0x20: case 0x40: case 0x80:
1041 return looking_for;
1042 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1043 case 0xef: case 0xdf: case 0xbf: case 0x7f:
1044 return !looking_for;
1046 return false;
1049 static bool
1050 move_needs_splitting (rtx dest, rtx src,
1051 enum machine_mode mode ATTRIBUTE_UNUSED)
1053 int s = mep_section_tag (src);
1055 while (1)
1057 if (GET_CODE (src) == CONST
1058 || GET_CODE (src) == MEM)
1059 src = XEXP (src, 0);
1060 else if (GET_CODE (src) == SYMBOL_REF
1061 || GET_CODE (src) == LABEL_REF
1062 || GET_CODE (src) == PLUS)
1063 break;
1064 else
1065 return false;
1067 if (s == 'f'
1068 || (GET_CODE (src) == PLUS
1069 && GET_CODE (XEXP (src, 1)) == CONST_INT
1070 && (INTVAL (XEXP (src, 1)) < -65536
1071 || INTVAL (XEXP (src, 1)) > 0xffffff))
1072 || (GET_CODE (dest) == REG
1073 && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
1074 return true;
1075 return false;
1078 bool
1079 mep_split_mov (rtx *operands, int symbolic)
1081 if (symbolic)
1083 if (move_needs_splitting (operands[0], operands[1], SImode))
1084 return true;
1085 return false;
1088 if (GET_CODE (operands[1]) != CONST_INT)
1089 return false;
1091 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1092 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1093 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1094 return false;
1096 if (((!reload_completed && !reload_in_progress)
1097 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1098 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1099 return false;
1101 return true;
1104 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1105 it to one specific value. So the insn chosen depends on whether
1106 the source and destination modes match. */
1108 bool
1109 mep_vliw_mode_match (rtx tgt)
1111 bool src_vliw = mep_vliw_function_p (cfun->decl);
1112 bool tgt_vliw = INTVAL (tgt);
1114 return src_vliw == tgt_vliw;
1117 /* Like the above, but also test for near/far mismatches. */
1119 bool
1120 mep_vliw_jmp_match (rtx tgt)
1122 bool src_vliw = mep_vliw_function_p (cfun->decl);
1123 bool tgt_vliw = INTVAL (tgt);
1125 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1126 return false;
1128 return src_vliw == tgt_vliw;
1131 bool
1132 mep_multi_slot (rtx x)
1134 return get_attr_slot (x) == SLOT_MULTI;
1138 bool
1139 mep_legitimate_constant_p (rtx x)
1141 /* We can't convert symbol values to gp- or tp-rel values after
1142 reload, as reload might have used $gp or $tp for other
1143 purposes. */
1144 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1146 char e = mep_section_tag (x);
1147 return (e != 't' && e != 'b');
1149 return 1;
1152 /* Be careful not to use macros that need to be compiled one way for
1153 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
/* Address legitimacy check.  Accepts %lo(sym)[reg] (only when the
   mode fits in 4 bytes), [reg], [reg + simm16], [reg + unspec],
   bare symbols for calls (VOIDmode), and selected word-aligned
   SImode/SFmode constants; far-section ('f') symbols are rejected.  */
1155 bool
1156 mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
1158 int the_tag;
1160 #define DEBUG_LEGIT 0
1161 #if DEBUG_LEGIT
1162 fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
1163 debug_rtx (x);
1164 #endif
1166 if (GET_CODE (x) == LO_SUM
1167 && GET_CODE (XEXP (x, 0)) == REG
1168 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1169 && CONSTANT_P (XEXP (x, 1)))
1171 if (GET_MODE_SIZE (mode) > 4)
1173 /* We will end up splitting this, and lo_sums are not
1174 offsettable for us. */
1175 #if DEBUG_LEGIT
1176 fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
1177 #endif
1178 return false;
1180 #if DEBUG_LEGIT
1181 fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
1182 #endif
1183 return true;
1186 if (GET_CODE (x) == REG
1187 && GEN_REG (REGNO (x), strict))
1189 #if DEBUG_LEGIT
1190 fprintf (stderr, " - yup, [reg]\n");
1191 #endif
1192 return true;
1195 if (GET_CODE (x) == PLUS
1196 && GET_CODE (XEXP (x, 0)) == REG
1197 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1198 && const_in_range (XEXP (x, 1), -32768, 32767))
1200 #if DEBUG_LEGIT
1201 fprintf (stderr, " - yup, [reg+const]\n");
1202 #endif
1203 return true;
/* Base register plus a (possibly offset) UNSPEC, e.g. a gp-/tp-rel
   relocation expression.  */
1206 if (GET_CODE (x) == PLUS
1207 && GET_CODE (XEXP (x, 0)) == REG
1208 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1209 && GET_CODE (XEXP (x, 1)) == CONST
1210 && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
1211 || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
1212 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
1213 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
1215 #if DEBUG_LEGIT
1216 fprintf (stderr, " - yup, [reg+unspec]\n");
1217 #endif
1218 return true;
/* From here on X must be a constant address; classify it by its
   section tag.  Far symbols are never directly addressable.  */
1221 the_tag = mep_section_tag (x);
1223 if (the_tag == 'f')
1225 #if DEBUG_LEGIT
1226 fprintf (stderr, " - nope, [far]\n");
1227 #endif
1228 return false;
/* VOIDmode is used for call addresses; bare symbols are fine
   there.  */
1231 if (mode == VOIDmode
1232 && GET_CODE (x) == SYMBOL_REF)
1234 #if DEBUG_LEGIT
1235 fprintf (stderr, " - yup, call [symbol]\n");
1236 #endif
1237 return true;
1240 if ((mode == SImode || mode == SFmode)
1241 && CONSTANT_P (x)
1242 && LEGITIMATE_CONSTANT_P (x)
1243 && the_tag != 't' && the_tag != 'b')
1245 if (GET_CODE (x) != CONST_INT
1246 || (INTVAL (x) <= 0xfffff
1247 && INTVAL (x) >= 0
1248 && (INTVAL (x) % 4) == 0))
1250 #if DEBUG_LEGIT
1251 fprintf (stderr, " - yup, [const]\n");
1252 #endif
1253 return true;
1257 #if DEBUG_LEGIT
1258 fprintf (stderr, " - nope.\n");
1259 #endif
1260 return false;
/* Fix up address *X for reload.  Return 1 if a reload was pushed
   (address handled here), 0 to let the generic reload code cope.  */
int
mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
			       enum reload_type type,
			       int ind_levels ATTRIBUTE_UNUSED)
{
  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == MEM
      && GET_CODE (XEXP (*x, 1)) == REG)
    {
      /* GCC will by default copy the MEM into a REG, which results in
	 an invalid address.  For us, the best thing to do is move the
	 whole expression to a REG.  */
      push_reload (*x, NULL_RTX, x, NULL,
		   GENERAL_REGS, mode, VOIDmode,
		   0, 0, opnum, type);
      return 1;
    }

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
      && GET_CODE (XEXP (*x, 1)) == CONST_INT)
    {
      char e = mep_section_tag (XEXP (*x, 0));

      /* 't'/'b' symbols are handled elsewhere; only reload the rest.  */
      if (e != 't' && e != 'b')
	{
	  /* GCC thinks that (sym+const) is a valid address.  Well,
	     sometimes it is, this time it isn't.  The best thing to
	     do is reload the symbol to a register, since reg+int
	     tends to work, and we can't just add the symbol and
	     constant anyway.  */
	  push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
		       GENERAL_REGS, mode, VOIDmode,
		       0, 0, opnum, type);
	  return 1;
	}
    }
  return 0;
}
/* Return the length contribution (2 for the short form, 4 otherwise)
   of the memory operand OPN of the single SET in INSN, for a core
   load/store.  NOTE(review): presumably used by the .md length
   attributes -- confirm against mep.md.  */
int
mep_core_address_length (rtx insn, int opn)
{
  rtx set = single_set (insn);
  rtx mem = XEXP (set, opn);
  rtx other = XEXP (set, 1-opn);  /* The non-memory operand of the SET.  */
  rtx addr = XEXP (mem, 0);

  /* Plain register indirect is always short.  */
  if (register_operand (addr, Pmode))
    return 2;
  if (GET_CODE (addr) == PLUS)
    {
      rtx addend = XEXP (addr, 1);

      gcc_assert (REG_P (XEXP (addr, 0)));

      switch (REGNO (XEXP (addr, 0)))
	{
	case STACK_POINTER_REGNUM:
	  /* SP-relative word accesses with a small scaled offset.  */
	  if (GET_MODE_SIZE (GET_MODE (mem)) == 4
	      && mep_imm7a4_operand (addend, VOIDmode))
	    return 2;
	  break;

	case 13: /* TP */
	  gcc_assert (REG_P (other));

	  /* The short TP-relative form only reaches registers 0-7.  */
	  if (REGNO (other) >= 8)
	    break;

	  if (GET_CODE (addend) == CONST
	      && GET_CODE (XEXP (addend, 0)) == UNSPEC
	      && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
	    return 2;

	  /* Small, mode-aligned literal offsets also qualify.  */
	  if (GET_CODE (addend) == CONST_INT
	      && INTVAL (addend) >= 0
	      && INTVAL (addend) <= 127
	      && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
	    return 2;
	  break;
	}
    }

  return 4;
}
1351 mep_cop_address_length (rtx insn, int opn)
1353 rtx set = single_set (insn);
1354 rtx mem = XEXP (set, opn);
1355 rtx addr = XEXP (mem, 0);
1357 if (GET_CODE (mem) != MEM)
1358 return 2;
1359 if (register_operand (addr, Pmode))
1360 return 2;
1361 if (GET_CODE (addr) == POST_INC)
1362 return 2;
1364 return 4;
#define DEBUG_EXPAND_MOV 0
/* Expand a move of MODE between operands[0] and operands[1],
   rewriting tp/gp-section ('b'/'t' tagged) and far ('f' tagged)
   symbol references into addressable forms.  Return true if the move
   was fully emitted here, false if the caller's pattern should still
   be used (possibly with modified operands).  */
bool
mep_expand_mov (rtx *operands, enum machine_mode mode)
{
  int i, t;
  int tag[2];
  rtx tpsym, tpoffs;
  int post_reload = 0;

  tag[0] = mep_section_tag (operands[0]);
  tag[1] = mep_section_tag (operands[1]);

  /* Before reload, force mem-to-mem moves through a register.  */
  if (!reload_in_progress
      && !reload_completed
      && GET_CODE (operands[0]) != REG
      && GET_CODE (operands[0]) != SUBREG
      && GET_CODE (operands[1]) != REG
      && GET_CODE (operands[1]) != SUBREG)
    operands[1] = copy_to_mode_reg (mode, operands[1]);

#if DEBUG_EXPAND_MOV
  fprintf(stderr, "expand move %s %d\n", mode_name[mode],
	  reload_in_progress || reload_completed);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* Wide moves are handled elsewhere (see mep_split_wide_move).  */
  if (mode == DImode || mode == DFmode)
    return false;

  if (reload_in_progress || reload_completed)
    {
      rtx r;

      if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
	cfun->machine->reload_changes_tp = true;

      /* Once reload may have clobbered $gp/$tp, we can no longer
	 synthesize gp/tp-relative addresses; fall through to the
	 post_reload paths below.  */
      if (tag[0] == 't' || tag[1] == 't')
	{
	  r = has_hard_reg_initial_val (Pmode, GP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
	    post_reload = 1;
	}
      if (tag[0] == 'b' || tag[1] == 'b')
	{
	  r = has_hard_reg_initial_val (Pmode, TP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
	    post_reload = 1;
	}
      if (cfun->machine->reload_changes_tp == true)
	post_reload = 1;
    }

  if (!post_reload)
    {
      rtx n;

      /* Loading a 'b'/'t' symbol value: rewrite it as base register
	 ($tp for 'b', $gp for 't') plus an UNSPEC-wrapped symbol.  */
      if (symbol_p (operands[1]))
	{
	  t = mep_section_tag (operands[1]);
	  if (t == 'b' || t == 't')
	    {
	      if (GET_CODE (operands[1]) == SYMBOL_REF)
		{
		  tpsym = operands[1];
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, operands[1]),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == PLUS
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
		{
		  /* (const (plus sym offset)) -- keep the offset
		     outside the UNSPEC.  */
		  tpsym = XEXP (XEXP (operands[1], 0), 0);
		  tpoffs = XEXP (XEXP (operands[1], 0), 1);
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, tpsym),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_PLUS (mode, n, tpoffs);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
		/* Already rewritten.  */
		return false;
	      else
		{
		  error ("unusual TP-relative address");
		  return false;
		}

	      n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
				       : mep_gp_rtx ()), n);
	      n = emit_insn (gen_rtx_SET (mode, operands[0], n));
#if DEBUG_EXPAND_MOV
	      fprintf(stderr, "mep_expand_mov emitting ");
	      debug_rtx(n);
#endif
	      return true;
	    }
	}

      /* Likewise rewrite 'b'/'t' MEM addresses in either operand.  */
      for (i=0; i < 2; i++)
	{
	  t = mep_section_tag (operands[i]);
	  if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
	    {
	      rtx sym, n, r;
	      int u;

	      sym = XEXP (operands[i], 0);
	      if (GET_CODE (sym) == CONST
		  && GET_CODE (XEXP (sym, 0)) == UNSPEC)
		sym = XVECEXP (XEXP (sym, 0), 0, 0);

	      if (t == 'b')
		{
		  r = mep_tp_rtx ();
		  u = UNS_TPREL;
		}
	      else
		{
		  r = mep_gp_rtx ();
		  u = UNS_GPREL;
		}

	      n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
	      n = gen_rtx_CONST (Pmode, n);
	      n = gen_rtx_PLUS (Pmode, r, n);
	      operands[i] = replace_equiv_address (operands[i], n);
	    }
	}
    }

  /* Control-register moves must have a general register on the other
     side; force a memory source/destination through a temporary.  */
  if ((GET_CODE (operands[1]) != REG
       && MEP_CONTROL_REG (operands[0]))
      || (GET_CODE (operands[0]) != REG
	  && MEP_CONTROL_REG (operands[1])))
    {
      rtx temp;
#if DEBUG_EXPAND_MOV
      fprintf (stderr, "cr-mem, forcing op1 to reg\n");
#endif
      temp = gen_reg_rtx (mode);
      emit_move_insn (temp, operands[1]);
      operands[1] = temp;
    }

  /* Stores to far symbols (or non-word sizes) need the address in a
     register first.  */
  if (symbolref_p (operands[0])
      && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
	  || (GET_MODE_SIZE (mode) != 4)))
    {
      rtx temp;

      gcc_assert (!reload_in_progress && !reload_completed);

      temp = force_reg (Pmode, XEXP (operands[0], 0));
      operands[0] = replace_equiv_address (operands[0], temp);
      emit_move_insn (operands[0], operands[1]);
      return true;
    }

  /* Before post-reload, 'b'/'t' sources were rewritten above; drop the
     tag so the top/bot-sym paths below don't fire for them.  */
  if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
    tag[1] = 0;

  /* Load a symbol value via the top/bottom-half symbol patterns.  */
  if (symbol_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
      emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
      return true;
    }

  /* Load through a symbol-addressed MEM: build the address with the
     top/bottom-half patterns, then move through it.  */
  if (symbolref_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      rtx temp;

      /* During/after reload we may not create pseudos; reuse the
	 destination as the address temporary.  */
      if (reload_in_progress || reload_completed)
	temp = operands[0];
      else
	temp = gen_reg_rtx (Pmode);

      emit_insn (gen_movsi_topsym_s (temp, operands[1]));
      emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
      emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
      return true;
    }

  return false;
}
/* Return false for operand combinations that the move patterns cannot
   handle at all (they must be split or rewritten by mep_expand_mov).  */

bool
mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int i;

#define DEBUG_MOV_OK 0
#if DEBUG_MOV_OK
  fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
	   mep_section_tag (operands[1]));
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* We want the movh patterns to get these.  */
  if (GET_CODE (operands[1]) == HIGH)
    return false;

  /* We can't store a register to a far variable without using a
     scratch register to hold the address.  Using far variables should
     be split by mep_emit_mov anyway.  */
  if (mep_section_tag (operands[0]) == 'f'
      || mep_section_tag (operands[1]) == 'f')
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, f\n");
#endif
      return false;
    }
  i = mep_section_tag (operands[1]);
  if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
    /* These are supposed to be generated with adds of the appropriate
       register.  During and after reload, however, we allow them to
       be accessed as normal symbols because adding a dependency on
       the base register now might cause problems.  */
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, bt\n");
#endif
      return false;
    }

  /* The only moves we can allow involve at least one general
     register, so require it.  */
  for (i = 0; i < 2; i ++)
    {
      /* Allow subregs too, before reload.  */
      rtx x = operands[i];

      if (GET_CODE (x) == SUBREG)
	x = XEXP (x, 0);
      if (GET_CODE (x) == REG
	  && ! MEP_CONTROL_REG (x))
	{
#if DEBUG_MOV_OK
	  fprintf (stderr, " - ok\n");
#endif
	  return true;
	}
    }

#if DEBUG_MOV_OK
  fprintf (stderr, " - no, no gen reg\n");
#endif
  return false;
}
#define DEBUG_SPLIT_WIDE_MOVE 0
/* Split the double-word move operands[0] = operands[1] into two
   single-word moves, storing the high parts in operands[2]/[3] and
   the low parts in operands[4]/[5].  The two halves are swapped when
   needed to avoid clobbering a source half before it is read.  */
void
mep_split_wide_move (rtx *operands, enum machine_mode mode)
{
  int i;

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  for (i = 0; i <= 1; i++)
    {
      rtx op = operands[i], hi, lo;

      switch (GET_CODE (op))
	{
	case REG:
	  {
	    unsigned int regno = REGNO (op);

	    if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
	      {
		rtx i32;

		/* A 64-bit coprocessor register: the low word is the
		   SImode view, the high word is bits 32..63 expressed
		   as a ZERO_EXTRACT of the DImode register.  */
		lo = gen_rtx_REG (SImode, regno);
		i32 = GEN_INT (32);
		hi = gen_rtx_ZERO_EXTRACT (SImode,
					   gen_rtx_REG (DImode, regno),
					   i32, i32);
	      }
	    else
	      {
		/* A pair of 32-bit registers; endianness selects which
		   of the pair holds the high word.  */
		hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
		lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
	      }
	  }
	  break;

	case CONST_INT:
	case CONST_DOUBLE:
	case MEM:
	  hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
	  lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* The high part of CR <- GPR moves must be done after the low part.  */
      operands [i + 4] = lo;
      operands [i + 2] = hi;
    }

  if (reg_mentioned_p (operands[2], operands[5])
      || GET_CODE (operands[2]) == ZERO_EXTRACT
      || GET_CODE (operands[4]) == ZERO_EXTRACT)
    {
      rtx tmp;

      /* Overlapping register pairs -- make sure we don't
	 early-clobber ourselves.  */
      tmp = operands[2];
      operands[2] = operands[4];
      operands[4] = tmp;
      tmp = operands[3];
      operands[3] = operands[5];
      operands[5] = tmp;
    }

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf(stderr, "\033[34m");
  debug_rtx (operands[2]);
  debug_rtx (operands[3]);
  debug_rtx (operands[4]);
  debug_rtx (operands[5]);
  fprintf(stderr, "\033[0m");
#endif
}
1709 /* Emit a setcc instruction in its entirity. */
1711 static bool
1712 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1714 rtx tmp;
1716 switch (code)
1718 case GT:
1719 case GTU:
1720 tmp = op1, op1 = op2, op2 = tmp;
1721 code = swap_condition (code);
1722 /* FALLTHRU */
1724 case LT:
1725 case LTU:
1726 op1 = force_reg (SImode, op1);
1727 emit_insn (gen_rtx_SET (VOIDmode, dest,
1728 gen_rtx_fmt_ee (code, SImode, op1, op2)));
1729 return true;
1731 case EQ:
1732 if (op2 != const0_rtx)
1733 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1734 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1735 return true;
1737 case NE:
1738 /* Branchful sequence:
1739 mov dest, 0 16-bit
1740 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1741 mov dest, 1 16-bit
1743 Branchless sequence:
1744 add3 tmp, op1, -op2 32-bit (or mov + sub)
1745 sltu3 tmp, tmp, 1 16-bit
1746 xor3 dest, tmp, 1 32-bit
1748 if (optimize_size && op2 != const0_rtx)
1749 return false;
1751 if (op2 != const0_rtx)
1752 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1754 op2 = gen_reg_rtx (SImode);
1755 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1757 emit_insn (gen_rtx_SET (VOIDmode, dest,
1758 gen_rtx_XOR (SImode, op2, const1_rtx)));
1759 return true;
1761 case LE:
1762 if (GET_CODE (op2) != CONST_INT
1763 || INTVAL (op2) == 0x7ffffff)
1764 return false;
1765 op2 = GEN_INT (INTVAL (op2) + 1);
1766 return mep_expand_setcc_1 (LT, dest, op1, op2);
1768 case LEU:
1769 if (GET_CODE (op2) != CONST_INT
1770 || INTVAL (op2) == -1)
1771 return false;
1772 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1773 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1775 case GE:
1776 if (GET_CODE (op2) != CONST_INT
1777 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1778 return false;
1779 op2 = GEN_INT (INTVAL (op2) - 1);
1780 return mep_expand_setcc_1 (GT, dest, op1, op2);
1782 case GEU:
1783 if (GET_CODE (op2) != CONST_INT
1784 || op2 == const0_rtx)
1785 return false;
1786 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1787 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1789 default:
1790 gcc_unreachable ();
1794 bool
1795 mep_expand_setcc (rtx *operands)
1797 rtx dest = operands[0];
1798 enum rtx_code code = GET_CODE (operands[1]);
1799 rtx op0 = operands[2];
1800 rtx op1 = operands[3];
1802 return mep_expand_setcc_1 (code, dest, op0, op1);
/* Rewrite a conditional-branch comparison (code operands[0], arguments
   operands[1] and operands[2]) into a form the branch patterns accept,
   emitting setcc sequences where needed, and return the comparison rtx
   to branch on.  */
rtx
mep_expand_cbranch (rtx *operands)
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx tmp;

 restart:
  switch (code)
    {
    case LT:
      if (mep_imm4_operand (op1, SImode))
	break;

      /* No direct form: materialize the comparison result and branch
	 on it being nonzero.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GE:
      if (mep_imm4_operand (op1, SImode))
	break;

      /* GE is the negation of LT: branch on the LT result being zero.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));

      code = EQ;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case EQ:
    case NE:
      if (! mep_reg_or_imm4_operand (op1, SImode))
	op1 = force_reg (SImode, op1);
      break;

    case LE:
    case GT:
      /* a <= c is a < c+1; a > c is a >= c+1 -- valid unless c is
	 INT_MAX.  */
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) != 0x7fffffff)
	{
	  op1 = GEN_INT (INTVAL (op1) + 1);
	  code = (code == LE ? LT : GE);
	  goto restart;
	}

      /* Otherwise compute op1 < op0 and branch on the result.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));

      code = (code == LE ? EQ : NE);
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LTU:
      if (op1 == const1_rtx)
	{
	  /* a <u 1 is simply a == 0.  */
	  code = EQ;
	  op1 = const0_rtx;
	  break;
	}

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LEU:
      /* Try LEU directly, else the swapped LTU with inverted sense.  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GTU:
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
		  || mep_expand_setcc_1 (LTU, tmp, op1, op0));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GEU:
      /* Try GEU directly, else LTU with inverted sense.  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    default:
      gcc_unreachable ();
    }

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
1918 const char *
1919 mep_emit_cbranch (rtx *operands, int ne)
1921 if (GET_CODE (operands[1]) == REG)
1922 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1923 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
1924 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1925 else
1926 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
/* Expand a call pattern.  When RETURNS_VALUE is nonzero, operands[0]
   is the value destination and operands[1] the (MEM) call address;
   otherwise operands[0] is the address.  $tp and $gp are passed to the
   call patterns explicitly.  */
void
mep_expand_call (rtx *operands, int returns_value)
{
  rtx addr = operands[returns_value];
  rtx tp = mep_tp_rtx ();
  rtx gp = mep_gp_rtx ();

  gcc_assert (GET_CODE (addr) == MEM);

  addr = XEXP (addr, 0);

  if (! mep_call_address_operand (addr, VOIDmode))
    addr = force_reg (SImode, addr);

  /* The "next arg" operand may be absent; normalize it to zero.  */
  if (! operands[returns_value+2])
    operands[returns_value+2] = const0_rtx;

  if (returns_value)
    emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
					     operands[3], tp, gp));
  else
    emit_call_insn (gen_call_internal (addr, operands[1],
				       operands[2], tp, gp));
}
/* Aliasing Support.  */

/* If X is a machine specific address (i.e. a symbol or label being
   referenced as a displacement from the GOT implemented using an
   UNSPEC), then return the base term.  Otherwise return X.  */

rtx
mep_find_base_term (rtx x)
{
  rtx base, term;
  int unspec;

  if (GET_CODE (x) != PLUS)
    return x;
  base = XEXP (x, 0);
  term = XEXP (x, 1);

  /* Only $tp/$gp-based addresses (using the live initial-value rtx)
     carry an UNSPEC we know how to unwrap.  */
  if (has_hard_reg_initial_val(Pmode, TP_REGNO)
      && base == mep_tp_rtx ())
    unspec = UNS_TPREL;
  else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
	   && base == mep_gp_rtx ())
    unspec = UNS_GPREL;
  else
    return x;

  if (GET_CODE (term) != CONST)
    return x;
  term = XEXP (term, 0);

  /* The UNSPEC kind must match the base register we found.  */
  if (GET_CODE (term) != UNSPEC
      || XINT (term, 1) != unspec)
    return x;

  /* Return the symbol wrapped inside the UNSPEC.  */
  return XVECEXP (term, 0, 0);
}
1991 /* Reload Support. */
1993 /* Return true if the registers in CLASS cannot represent the change from
1994 modes FROM to TO. */
1996 bool
1997 mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
1998 enum reg_class regclass)
2000 if (from == to)
2001 return false;
2003 /* 64-bit COP regs must remain 64-bit COP regs. */
2004 if (TARGET_64BIT_CR_REGS
2005 && (regclass == CR_REGS
2006 || regclass == LOADABLE_CR_REGS)
2007 && (GET_MODE_SIZE (to) < 8
2008 || GET_MODE_SIZE (from) < 8))
2009 return true;
2011 return false;
2014 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
2016 static bool
2017 mep_general_reg (rtx x)
2019 while (GET_CODE (x) == SUBREG)
2020 x = XEXP (x, 0);
2021 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
2024 static bool
2025 mep_nongeneral_reg (rtx x)
2027 while (GET_CODE (x) == SUBREG)
2028 x = XEXP (x, 0);
2029 return (GET_CODE (x) == REG
2030 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2033 static bool
2034 mep_general_copro_reg (rtx x)
2036 while (GET_CODE (x) == SUBREG)
2037 x = XEXP (x, 0);
2038 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
2041 static bool
2042 mep_nonregister (rtx x)
2044 while (GET_CODE (x) == SUBREG)
2045 x = XEXP (x, 0);
2046 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2049 #define DEBUG_RELOAD 0
/* Return the secondary reload class needed for moving value X to or
   from a register in coprocessor register class CLASS.  */

static enum reg_class
mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
{
  if (mep_general_reg (x))
    /* We can do the move directly if mep_have_core_copro_moves_p,
       otherwise we need to go through memory.  Either way, no secondary
       register is needed.  */
    return NO_REGS;

  if (mep_general_copro_reg (x))
    {
      /* We can do the move directly if mep_have_copro_copro_moves_p.  */
      if (mep_have_copro_copro_moves_p)
	return NO_REGS;

      /* Otherwise we can use a temporary if mep_have_core_copro_moves_p.  */
      if (mep_have_core_copro_moves_p)
	return GENERAL_REGS;

      /* Otherwise we need to do it through memory.  No secondary
	 register is needed.  */
      return NO_REGS;
    }

  /* Memory satisfying constraint 'U' can be loaded/stored directly,
     provided the class only contains loadable CRs.  */
  if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
      && constraint_satisfied_p (x, CONSTRAINT_U))
    /* X is a memory value that we can access directly.  */
    return NO_REGS;

  /* We have to move X into a GPR first and then copy it to
     the coprocessor register.  The move from the GPR to the
     coprocessor might be done directly or through memory,
     depending on mep_have_core_copro_moves_p.  */
  return GENERAL_REGS;
}
/* Copying X to register in RCLASS: return the secondary register class
   needed, or NO_REGS if the copy can be made directly.  MODE is only
   referenced by the debug output.  */

enum reg_class
mep_secondary_input_reload_class (enum reg_class rclass,
				  enum machine_mode mode ATTRIBUTE_UNUSED,
				  rtx x)
{
  int rv = NO_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
  debug_rtx (x);
#endif

  if (reg_class_subset_p (rclass, CR_REGS))
    rv = mep_secondary_copro_reload_class (rclass, x);
  else if (MEP_NONGENERAL_CLASS (rclass)
	   && (mep_nonregister (x) || mep_nongeneral_reg (x)))
    /* Non-general destinations can only be reached from a general
       register; anything else must be staged through one.  */
    rv = GENERAL_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
#endif
  return rv;
}
/* Copying register in RCLASS to X: return the secondary register class
   needed, or NO_REGS.  Mirror image of
   mep_secondary_input_reload_class above.  */

enum reg_class
mep_secondary_output_reload_class (enum reg_class rclass,
				   enum machine_mode mode ATTRIBUTE_UNUSED,
				   rtx x)
{
  int rv = NO_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
  debug_rtx (x);
#endif

  if (reg_class_subset_p (rclass, CR_REGS))
    rv = mep_secondary_copro_reload_class (rclass, x);
  else if (MEP_NONGENERAL_CLASS (rclass)
	   && (mep_nonregister (x) || mep_nongeneral_reg (x)))
    rv = GENERAL_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
#endif

  return rv;
}
/* Implement SECONDARY_MEMORY_NEEDED.  A stack slot is required for
   core<->copro moves when direct core/copro moves are unavailable, and
   for copro<->copro moves when those are unavailable too.  */

bool
mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
			     enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (!mep_have_core_copro_moves_p)
    {
      if (reg_classes_intersect_p (rclass1, CR_REGS)
	  && reg_classes_intersect_p (rclass2, GENERAL_REGS))
	return true;
      if (reg_classes_intersect_p (rclass2, CR_REGS)
	  && reg_classes_intersect_p (rclass1, GENERAL_REGS))
	return true;
      if (!mep_have_copro_copro_moves_p
	  && reg_classes_intersect_p (rclass1, CR_REGS)
	  && reg_classes_intersect_p (rclass2, CR_REGS))
	return true;
    }
  return false;
}
/* Expand a secondary reload: move operands[1] to operands[0] using the
   scratch register operands[2], dispatching on whether each side is a
   far symbol ('f' tag) and/or a non-general register.  */
void
mep_expand_reload (rtx *operands, enum machine_mode mode)
{
  /* There are three cases for each direction:
     register, farsym
     control, farsym
     control, nearsym */

  int s0 = mep_section_tag (operands[0]) == 'f';
  int s1 = mep_section_tag (operands[1]) == 'f';
  int c0 = mep_nongeneral_reg (operands[0]);
  int c1 = mep_nongeneral_reg (operands[1]);
  /* Two decimal digits: tens digit describes operand 0, units digit
     operand 1 (2 = far symbol, 1 = control reg, 0 = neither).  */
  int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);

#if DEBUG_RELOAD
  fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  switch (which)
    {
      /* NOTE(review): labels 00, 01, 02 are octal literals, but octal
	 and decimal agree below 8, so the dispatch is correct.  */
    case 00: /* Don't know why this gets here.  */
    case 02: /* general = far */
      emit_move_insn (operands[0], operands[1]);
      return;

    case 10: /* cr = mem */
    case 11: /* cr = cr */
    case 01: /* mem = cr */
    case 12: /* cr = far */
      /* Stage the value through the general-register scratch.  */
      emit_move_insn (operands[2], operands[1]);
      emit_move_insn (operands[0], operands[2]);
      return;

    case 20: /* far = general */
      /* Load the far address into the scratch, then store through it.  */
      emit_move_insn (operands[2], XEXP (operands[1], 0));
      emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
      return;

    case 21: /* far = cr */
    case 22: /* far = far */
    default:
      fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
	       which, mode_name[mode]);
      debug_rtx (operands[0]);
      debug_rtx (operands[1]);
      gcc_unreachable ();
    }
}
/* Implement PREFERRED_RELOAD_CLASS.  See whether X is a constant that
   can be moved directly into registers 0 to 7, but not into the rest.
   If so, and if the required class includes registers 0 to 7, restrict
   it to those registers.  */

enum reg_class
mep_preferred_reload_class (rtx x, enum reg_class rclass)
{
  switch (GET_CODE (x))
    {
    case CONST_INT:
      /* Integers with bits set only in [16,24) need the TPREL_REGS
	 subset (registers 0-7).  */
      if (INTVAL (x) >= 0x10000
	  && INTVAL (x) < 0x01000000
	  && (INTVAL (x) & 0xffff) != 0
	  && reg_class_subset_p (TPREL_REGS, rclass))
	rclass = TPREL_REGS;
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Non-far symbolic constants likewise prefer registers 0-7.  */
      if (mep_section_tag (x) != 'f'
	  && reg_class_subset_p (TPREL_REGS, rclass))
	rclass = TPREL_REGS;
      break;

    default:
      break;
    }
  return rclass;
}
/* Implement REGISTER_MOVE_COST.  Return 2 for direct single-register
   moves, 4 for direct double-register moves, and 1000 for anything
   that requires a temporary register or temporary stack slot.  */

int
mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
{
  /* Direct copro<->copro moves, when the target supports them.  */
  if (mep_have_copro_copro_moves_p
      && reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 4;
      return 2;
    }
  /* Copro<->copro without direct moves: twice as expensive.  */
  if (reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 8;
      return 4;
    }
  /* Core<->copro moves.  */
  if (reg_class_subset_p (from, CR_REGS)
      || reg_class_subset_p (to, CR_REGS))
    {
      if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 4;
      return 2;
    }
  /* Moves needing a stack slot or a general-register temporary are
     effectively prohibitive.  */
  if (mep_secondary_memory_needed (from, to, mode))
    return 1000;
  if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
    return 1000;

  if (GET_MODE_SIZE (mode) > 4)
    return 4;

  return 2;
}
/* Functions to save and restore machine-specific function data.  */

/* Return a fresh, zero-initialized, GC-allocated machine_function
   record for the current function.  */
static struct machine_function *
mep_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
/* Return a stack slot holding hard register REG's value on entry, or
   NULL_RTX if the generic initial-value machinery should allocate one.
   Slots are 4 bytes each, addressed at negative offsets from the arg
   pointer, and recorded in cfun->machine->reg_save_slot[].  */
static rtx
mep_allocate_initial_value (rtx reg)
{
  int rss;

  if (GET_CODE (reg) != REG)
    return NULL_RTX;

  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    return NULL_RTX;

  /* In interrupt functions, the "initial" values of $gp and $tp are
     provided by the prologue.  They are not necessarily the same as
     the values that the caller was using.  */
  if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
    if (mep_interrupt_p ())
      return NULL_RTX;

  /* Lazily assign this register the next 4-byte save slot.  */
  if (! cfun->machine->reg_save_slot[REGNO(reg)])
    {
      cfun->machine->reg_save_size += 4;
      cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
    }

  rss = cfun->machine->reg_save_slot[REGNO(reg)];
  return gen_rtx_MEM (SImode, plus_constant (arg_pointer_rtx, -rss));
}
2326 mep_return_addr_rtx (int count)
2328 if (count != 0)
2329 return const0_rtx;
2331 return get_hard_reg_initial_val (Pmode, LP_REGNO);
/* Return the pseudo holding $tp's value on entry to the function.  */
static rtx
mep_tp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, TP_REGNO);
}
/* Return the pseudo holding $gp's value on entry to the function.  */
static rtx
mep_gp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, GP_REGNO);
}
/* Return true if the current function has the "interrupt" attribute.
   The answer is cached in cfun->machine->interrupt_handler:
   0 = not yet computed, 1 = no, 2 = yes.  */
static bool
mep_interrupt_p (void)
{
  if (cfun->machine->interrupt_handler == 0)
    {
      int interrupt_handler
	= (lookup_attribute ("interrupt",
			     DECL_ATTRIBUTES (current_function_decl))
	   != NULL_TREE);
      cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
    }
  return cfun->machine->interrupt_handler == 2;
}
2360 static bool
2361 mep_disinterrupt_p (void)
2363 if (cfun->machine->disable_interrupts == 0)
2365 int disable_interrupts
2366 = (lookup_attribute ("disinterrupt",
2367 DECL_ATTRIBUTES (current_function_decl))
2368 != NULL_TREE);
2369 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2371 return cfun->machine->disable_interrupts == 2;
/* Frame/Epilog/Prolog Related.  */

/* Return true if INSN (or pattern) modifies REG.  */
static bool
mep_reg_set_p (rtx reg, rtx insn)
{
  /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
  if (INSN_P (insn))
    {
      /* Auto-inc/dec of REG counts as a set.  */
      if (FIND_REG_INC_NOTE (insn, reg))
	return true;
      insn = PATTERN (insn);
    }

  /* A register copied to itself is not really modified.  */
  if (GET_CODE (insn) == SET
      && GET_CODE (XEXP (insn, 0)) == REG
      && GET_CODE (XEXP (insn, 1)) == REG
      && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
    return false;

  return set_of (reg, insn) != NULL_RTX;
}
/* Values cached in cfun->machine->reg_saved[] (see
   mep_call_saves_register below).  */
#define MEP_SAVES_UNKNOWN 0
#define MEP_SAVES_YES 1
#define MEP_SAVES_MAYBE 2
#define MEP_SAVES_NO 3

/* Return true if register REGNO is modified anywhere in the current
   function (conservatively true for live registers in interrupt
   handlers, and for $lp when profiling clobbers it).  */
static bool
mep_reg_set_in_function (int regno)
{
  rtx reg, insn;

  if (mep_interrupt_p () && df_regs_ever_live_p(regno))
    return true;

  /* Profiling code implicitly clobbers the link register.  */
  if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
    return true;

  push_topmost_sequence ();
  insn = get_insns ();
  pop_topmost_sequence ();

  if (!insn)
    return false;

  reg = gen_rtx_REG (SImode, regno);

  /* Scan every insn in the function for a set of REG.  */
  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && mep_reg_set_p (reg, insn))
      return true;
  return false;
}
/* Return true if the current function contains an asm statement with
   no operands (ASM_INPUT).  Cached in
   cfun->machine->asms_without_operands: 0 = unknown, 1 = no, 2 = yes.  */
static bool
mep_asm_without_operands_p (void)
{
  if (cfun->machine->asms_without_operands == 0)
    {
      rtx insn;

      push_topmost_sequence ();
      insn = get_insns ();
      pop_topmost_sequence ();

      cfun->machine->asms_without_operands = 1;
      while (insn)
	{
	  if (INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == ASM_INPUT)
	    {
	      cfun->machine->asms_without_operands = 2;
	      break;
	    }
	  insn = NEXT_INSN (insn);
	}

    }
  return cfun->machine->asms_without_operands == 2;
}
/* Interrupt functions save/restore every call-preserved register, and
   any call-used register it uses (or all if it calls any function,
   since they may get clobbered there too).  Here we check to see
   which call-used registers need saving.  */

/* Extra IVC2 coprocessor control registers that must be preserved.  */
#define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
			    && (r == FIRST_CCR_REGNO + 1 \
				|| (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
				|| (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))

/* Return true if register R must be saved because the current function
   is an interrupt handler.  */
static bool
mep_interrupt_saved_reg (int r)
{
  if (!mep_interrupt_p ())
    return false;
  /* The temporaries used for control-register save/restore.  */
  if (r == REGSAVE_CONTROL_TEMP
      || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
    return true;
  /* An operand-less asm could touch anything; be conservative.  */
  if (mep_asm_without_operands_p ()
      && (!fixed_regs[r]
	  || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
	  || IVC2_ISAVED_REG (r)))
    return true;
  if (!current_function_is_leaf)
    /* Function calls mean we need to save $lp.  */
    if (r == LP_REGNO || IVC2_ISAVED_REG (r))
      return true;
  if (!current_function_is_leaf || cfun->machine->doloop_tags > 0)
    /* The interrupt handler might use these registers for repeat blocks,
       or it might call a function that does so.  */
    if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
      return true;
  /* A leaf handler need not save call-used registers it never touches.  */
  if (current_function_is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
    return false;
  /* Functions we call might clobber these.  */
  if (call_used_regs[r] && !fixed_regs[r])
    return true;
  /* Additional registers that need to be saved for IVC2.  */
  if (IVC2_ISAVED_REG (r))
    return true;

  return false;
}
/* Return true if the current function must save register R in its
   stack frame.  The decision is recomputed and cached in
   cfun->machine->reg_saved[] until the frame layout is locked
   (cfun->machine->frame_locked); afterwards the cached answer is
   returned unchanged so the layout stays stable.  */
2500 static bool
2501 mep_call_saves_register (int r)
2503 if (! cfun->machine->frame_locked)
2505 int rv = MEP_SAVES_NO;
2507 if (cfun->machine->reg_save_slot[r])
2508 rv = MEP_SAVES_YES;
/* Profiling code implicitly clobbers $lp, so save it whenever
   profiling is enabled (see also mep_epilogue_uses).  */
2509 else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2510 rv = MEP_SAVES_YES;
2511 else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
2512 rv = MEP_SAVES_YES;
2513 else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
2514 rv = MEP_SAVES_YES;
2515 else if (crtl->calls_eh_return && (r == 10 || r == 11))
2516 /* We need these to have stack slots so that they can be set during
2517 unwinding. */
2518 rv = MEP_SAVES_YES;
2519 else if (mep_interrupt_saved_reg (r))
2520 rv = MEP_SAVES_YES;
2521 cfun->machine->reg_saved[r] = rv;
2523 return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
2526 /* Return true if epilogue uses register REGNO. */
2528 bool
2529 mep_epilogue_uses (int regno)
2531 /* Since $lp is a call-saved register, the generic code will normally
2532 mark it used in the epilogue if it needs to be saved and restored.
2533 However, when profiling is enabled, the profiling code will implicitly
2534 clobber $11. This case has to be handled specially both here and in
2535 mep_call_saves_register. */
2536 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2537 return true;
2538 /* Interrupt functions save/restore pretty much everything. */
2539 return (reload_completed && mep_interrupt_saved_reg (regno));
2542 static int
2543 mep_reg_size (int regno)
2545 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2546 return 8;
2547 return 4;
2550 /* Worker function for TARGET_CAN_ELIMINATE. */
2552 bool
2553 mep_can_eliminate (const int from, const int to)
2555 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2556 ? ! frame_pointer_needed
2557 : true);
/* Return the offset between elimination registers FROM and TO.  As a
   side effect this computes the frame layout: the padding fields
   regsave_filler (pads the register-save area to a multiple of 8)
   and frame_filler (pads the whole frame to a multiple of 8).  */
2561 mep_elimination_offset (int from, int to)
2563 int reg_save_size;
2564 int i;
2565 int frame_size = get_frame_size () + crtl->outgoing_args_size;
2566 int total_size;
/* Until the layout is locked, flush the save-decision cache so
   mep_call_saves_register recomputes everything from scratch.  */
2568 if (!cfun->machine->frame_locked)
2569 memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));
2571 /* We don't count arg_regs_to_save in the arg pointer offset, because
2572 gcc thinks the arg pointer has moved along with the saved regs.
2573 However, we do count it when we adjust $sp in the prologue. */
2574 reg_save_size = 0;
2575 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2576 if (mep_call_saves_register (i))
2577 reg_save_size += mep_reg_size (i);
/* Pad the register-save area to an 8-byte multiple.  */
2579 if (reg_save_size % 8)
2580 cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
2581 else
2582 cfun->machine->regsave_filler = 0;
2584 /* This is what our total stack adjustment looks like. */
2585 total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);
/* And pad the total frame to an 8-byte multiple as well.  */
2587 if (total_size % 8)
2588 cfun->machine->frame_filler = 8 - (total_size % 8);
2589 else
2590 cfun->machine->frame_filler = 0;
2593 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
2594 return reg_save_size + cfun->machine->regsave_filler;
2596 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2597 return cfun->machine->frame_filler + frame_size;
2599 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2600 return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;
/* No other eliminations exist on this target.  */
2602 gcc_unreachable ();
/* Convenience helper: mark X as frame-related (so the prologue CFI
   machinery will describe it) and return it.  */
2605 static rtx
2606 F (rtx x)
2608 RTX_FRAME_RELATED_P (x) = 1;
2609 return x;
2612 /* Since the prologue/epilogue code is generated after optimization,
2613 we can't rely on gcc to split constants for us. So, this code
2614 captures all the ways to add a constant to a register in one logic
2615 chunk, including optimizing away insns we just don't need. This
2616 makes the prolog/epilog code easier to follow. */
/* Emit insns computing hard register DEST = SRC + VALUE.  If
   MARK_FRAME is nonzero, mark the emitted insns frame-related.  */
2617 static void
2618 add_constant (int dest, int src, int value, int mark_frame)
2620 rtx insn;
2621 int hi, lo;
2623 if (src == dest && value == 0)
2624 return;
/* Zero addend: a plain register copy suffices.  */
2626 if (value == 0)
2628 insn = emit_move_insn (gen_rtx_REG (SImode, dest),
2629 gen_rtx_REG (SImode, src));
2630 if (mark_frame)
2631 RTX_FRAME_RELATED_P(insn) = 1;
2632 return;
/* Constant fits a 16-bit signed immediate add.  */
2635 if (value >= -32768 && value <= 32767)
2637 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2638 gen_rtx_REG (SImode, src),
2639 GEN_INT (value)));
2640 if (mark_frame)
2641 RTX_FRAME_RELATED_P(insn) = 1;
2642 return;
2645 /* Big constant, need to use a temp register. We use
2646 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2647 area is always small enough to directly add to). */
2649 hi = trunc_int_for_mode (value & 0xffff0000, SImode);
2650 lo = value & 0xffff;
2652 insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2653 GEN_INT (hi));
2655 if (lo)
2657 insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2658 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2659 GEN_INT (lo)));
2662 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2663 gen_rtx_REG (SImode, src),
2664 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
2665 if (mark_frame)
2667 RTX_FRAME_RELATED_P(insn) = 1;
/* The multi-insn sequence is opaque to the CFI machinery, so attach
   a note describing the net effect on DEST.  */
2668 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2669 gen_rtx_SET (SImode,
2670 gen_rtx_REG (SImode, dest),
2671 gen_rtx_PLUS (SImode,
2672 gen_rtx_REG (SImode, dest),
2673 GEN_INT (value))));
2677 /* Move SRC to DEST. Mark the move as being potentially dead if
2678 MAYBE_DEAD_P. */
2680 static rtx
2681 maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2683 rtx insn = emit_move_insn (dest, src);
/* The REG_MAYBE_DEAD annotation below is currently disabled; the
   function just emits a plain move and returns it, and MAYBE_DEAD_P
   is ignored.  */
2684 #if 0
2685 if (maybe_dead_p)
2686 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2687 #endif
2688 return insn;
2691 /* Used for interrupt functions, which can't assume that $tp and $gp
2692 contain the correct pointers. */
2694 static void
2695 mep_reload_pointer (int regno, const char *symbol)
2697 rtx reg, sym;
2699 if (!df_regs_ever_live_p(regno) && current_function_is_leaf)
2700 return;
2702 reg = gen_rtx_REG (SImode, regno);
2703 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2704 emit_insn (gen_movsi_topsym_s (reg, sym));
2705 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2708 /* Assign save slots for any register not already saved. DImode
2709 registers go at the end of the reg save area; the rest go at the
2710 beginning. This is for alignment purposes. Returns true if a frame
2711 is really needed. */
2712 static bool
2713 mep_assign_save_slots (int reg_save_size)
2715 bool really_need_stack_frame = false;
2716 int di_ofs = 0;
2717 int i;
2719 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2720 if (mep_call_saves_register(i))
2722 int regsize = mep_reg_size (i);
/* $tp/$gp/$lp get save slots, but only force a real frame when the
   function actually modifies them.  */
2724 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2725 || mep_reg_set_in_function (i))
2726 really_need_stack_frame = true;
2728 if (cfun->machine->reg_save_slot[i])
2729 continue;
/* 4-byte registers pack from the start of the save area; 8-byte
   (DImode) registers fill downward from the end so they stay
   8-byte aligned.  */
2731 if (regsize < 8)
2733 cfun->machine->reg_save_size += regsize;
2734 cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
2736 else
2738 cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
2739 di_ofs += 8;
/* From here on the layout (and mep_call_saves_register's cache) is
   frozen.  */
2742 cfun->machine->frame_locked = 1;
2743 return really_need_stack_frame;
/* Expand the function prologue: adjust $sp, store every register
   approved by mep_call_saves_register into its assigned slot, set up
   the frame pointer if needed, and reload $gp/$tp for interrupt
   handlers.  */
2746 void
2747 mep_expand_prologue (void)
2749 int i, rss, sp_offset = 0;
2750 int reg_save_size;
2751 int frame_size;
2752 int really_need_stack_frame;
2754 /* We must not allow register renaming in interrupt functions,
2755 because that invalidates the correctness of the set of call-used
2756 registers we're going to save/restore. */
2757 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2759 if (mep_disinterrupt_p ())
2760 emit_insn (gen_mep_disable_int ());
2762 cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;
2764 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2765 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2766 really_need_stack_frame = frame_size;
2768 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
/* If the whole frame fits a small offset, do the entire $sp
   adjustment up front so the saves below can use short offsets.  */
2770 sp_offset = reg_save_size;
2771 if (sp_offset + frame_size < 128)
2772 sp_offset += frame_size ;
2774 add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);
/* Store each saved register into its slot.  */
2776 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2777 if (mep_call_saves_register(i))
2779 rtx mem;
2780 bool maybe_dead_p;
2781 enum machine_mode rmode;
2783 rss = cfun->machine->reg_save_slot[i];
2785 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2786 && (!mep_reg_set_in_function (i)
2787 && !mep_interrupt_p ()))
2788 continue;
2790 if (mep_reg_size (i) == 8)
2791 rmode = DImode;
2792 else
2793 rmode = SImode;
2795 /* If there is a pseudo associated with this register's initial value,
2796 reload might have already spilt it to the stack slot suggested by
2797 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2798 deleted as dead. */
2799 mem = gen_rtx_MEM (rmode,
2800 plus_constant (stack_pointer_rtx, sp_offset - rss));
2801 maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));
2803 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2804 F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
2805 else if (rmode == DImode)
/* 64-bit register that can't be stored directly: move each 32-bit
   half through a core temporary and store the halves separately,
   honoring endianness via BE.  */
2807 rtx insn;
2808 int be = TARGET_BIG_ENDIAN ? 4 : 0;
2810 mem = gen_rtx_MEM (SImode,
2811 plus_constant (stack_pointer_rtx, sp_offset - rss + be));
2813 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2814 gen_rtx_REG (SImode, i),
2815 maybe_dead_p);
2816 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2817 gen_rtx_ZERO_EXTRACT (SImode,
2818 gen_rtx_REG (DImode, i),
2819 GEN_INT (32),
2820 GEN_INT (32)),
2821 maybe_dead_p);
2822 insn = maybe_dead_move (mem,
2823 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2824 maybe_dead_p);
2825 RTX_FRAME_RELATED_P (insn) = 1;
/* Describe the full DImode store to the CFI machinery in one note.  */
2827 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2828 gen_rtx_SET (VOIDmode,
2829 copy_rtx (mem),
2830 gen_rtx_REG (rmode, i)));
2831 mem = gen_rtx_MEM (SImode,
2832 plus_constant (stack_pointer_rtx, sp_offset - rss + (4-be)));
2833 insn = maybe_dead_move (mem,
2834 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2835 maybe_dead_p);
2834 
/* Emit the human-readable frame-layout comments at the start of a
   function's assembly output.  HWI_LOCAL is the size of the local
   variable area.  Also switches the printed name of FP_REGNO between
   "$fp" and "$8" depending on whether this function really uses a
   frame pointer.  */
2872 static void
2873 mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2875 int local = hwi_local;
2876 int frame_size = local + crtl->outgoing_args_size;
2877 int reg_save_size;
2878 int ffill;
2879 int i, sp, skip;
2880 int sp_offset;
2881 int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
2883 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2884 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2885 sp_offset = reg_save_size + frame_size;
2887 ffill = cfun->machine->frame_filler;
/* Print $8 as "$fp" only when it really is the frame pointer.  */
2889 if (cfun->machine->mep_frame_pointer_needed)
2890 reg_names[FP_REGNO] = "$fp";
2891 else
2892 reg_names[FP_REGNO] = "$8";
2894 if (sp_offset == 0)
2895 return;
/* Without debug info, emit just a one-line frame summary.  */
2897 if (debug_info_level == DINFO_LEVEL_NONE)
2899 fprintf (file, "\t# frame: %d", sp_offset);
2900 if (reg_save_size)
2901 fprintf (file, " %d regs", reg_save_size);
2902 if (local)
2903 fprintf (file, " %d locals", local);
2904 if (crtl->outgoing_args_size)
2905 fprintf (file, " %d args", crtl->outgoing_args_size);
2906 fprintf (file, "\n");
2907 return;
2910 fprintf (file, "\t#\n");
2911 fprintf (file, "\t# Initial Frame Information:\n");
2912 if (sp_offset || !frame_pointer_needed)
2913 fprintf (file, "\t# Entry ---------- 0\n");
2915 /* Sort registers by save slots, so they're printed in the order
2916 they appear in memory, not the order they're saved in. */
2917 for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
2918 slot_map[si] = si;
/* Simple quadratic exchange sort; FIRST_PSEUDO_REGISTER is small and
   this code only runs while emitting assembly comments.  */
2919 for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
2920 for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
2921 if (cfun->machine->reg_save_slot[slot_map[si]]
2922 > cfun->machine->reg_save_slot[slot_map[sj]])
2924 int t = slot_map[si];
2925 slot_map[si] = slot_map[sj];
2926 slot_map[sj] = t;
2929 sp = 0;
2930 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2932 int rsize;
2933 int r = slot_map[i];
2934 int rss = cfun->machine->reg_save_slot[r];
2936 if (!mep_call_saves_register (r))
2937 continue;
2939 if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
2940 && (!mep_reg_set_in_function (r)
2941 && !mep_interrupt_p ()))
2942 continue;
2944 rsize = mep_reg_size(r);
2945 skip = rss - (sp+rsize);
2946 if (skip)
2947 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2948 fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2949 rsize, reg_names[r], sp_offset - rss);
2950 sp = rss;
2953 skip = reg_save_size - sp;
2954 if (skip)
2955 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2957 if (frame_pointer_needed)
2958 fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
2959 if (local)
2960 fprintf (file, "\t# %3d bytes for local vars\n", local);
2961 if (ffill)
2962 fprintf (file, "\t# %3d bytes for alignment\n", ffill);
2963 if (crtl->outgoing_args_size)
2964 fprintf (file, "\t# %3d bytes for outgoing args\n",
2965 crtl->outgoing_args_size);
2966 fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
2967 fprintf (file, "\t#\n");
/* Nonzero while expanding an eh_return epilogue: suppresses the
   restore of $lp so the EH handler address placed there survives.  */
2971 static int mep_prevent_lp_restore = 0;
/* Nonzero while expanding a sibcall epilogue: the final return insn
   is omitted because the sibling call follows.  */
2972 static int mep_sibcall_epilogue = 0;
/* Expand the function epilogue: restore saved registers, deallocate
   the frame, and emit the appropriate return.  Behavior is modified
   by mep_prevent_lp_restore (eh_return) and mep_sibcall_epilogue
   (sibling calls).  */
2974 void
2975 mep_expand_epilogue (void)
2977 int i, sp_offset = 0;
2978 int reg_save_size = 0;
2979 int frame_size;
2980 int lp_temp = LP_REGNO, lp_slot = -1;
2981 int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
2982 int interrupt_handler = mep_interrupt_p ();
2984 if (profile_arc_flag == 2)
2985 emit_insn (gen_mep_bb_trace_ret ());
2987 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2988 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2990 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
2992 if (frame_pointer_needed)
2994 /* If we have a frame pointer, we won't have a reliable stack
2995 pointer (alloca, you know), so rebase SP from FP */
2996 emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
2997 gen_rtx_REG (SImode, FP_REGNO))
3095 void
3096 mep_expand_eh_return (rtx *operands)
3098 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3100 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3101 emit_move_insn (ra, operands[0]);
3102 operands[0] = ra;
3105 emit_insn (gen_eh_epilogue (operands[0]));
/* Expand the body of the eh_epilogue pattern: run the normal epilogue
   with the $lp restore suppressed, so the handler address installed
   by mep_expand_eh_return survives.  Hard register 0 carries the
   stack adjustment (see crtl->calls_eh_return handling in
   mep_expand_epilogue).  */
3108 void
3109 mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
3111 cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
3112 mep_prevent_lp_restore = 1;
3113 mep_expand_epilogue ();
3114 mep_prevent_lp_restore = 0;
/* Expand the epilogue for a sibling call: restore registers and pop
   the frame, but emit no return insn (the sibcall itself follows).  */
3117 void
3118 mep_expand_sibcall_epilogue (void)
3120 mep_sibcall_epilogue = 1;
3121 mep_expand_epilogue ();
3122 mep_sibcall_epilogue = 0;
3125 static bool
3126 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3128 if (decl == NULL)
3129 return false;
3131 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3132 return false;
3134 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3135 if (mep_interrupt_p () || mep_disinterrupt_p ())
3136 return false;
3138 return true;
/* EH return stack-adjust register: hard register 10 (cf. the
   r == 10 || r == 11 handling in mep_call_saves_register).  */
3142 mep_return_stackadj_rtx (void)
3144 return gen_rtx_REG (SImode, 10);
/* The return-address register, $lp.  */
3148 mep_return_handler_rtx (void)
3150 return gen_rtx_REG (SImode, LP_REGNO);
/* Emit the profiling call sequence to FILE: preserve $0 and $lp on
   the stack around a bsr to __mep_mcount.  Always right at the
   beginning of the function.  */
void
mep_function_profiler (FILE *file)
{
  static const char *const mcount_seq[] = {
    "# mep function profiler",
    "add\t$sp, -8",
    "sw\t$0, ($sp)",
    "ldc\t$0, $lp",
    "sw\t$0, 4($sp)",
    "bsr\t__mep_mcount",
    "lw\t$0, 4($sp)",
    "stc\t$0, $lp",
    "lw\t$0, ($sp)",
    "add\t$sp, 8",
  };
  size_t n;

  for (n = 0; n < sizeof (mcount_seq) / sizeof (mcount_seq[0]); n++)
    fprintf (file, "\t%s\n", mcount_seq[n]);
  fprintf (file, "\n");
}
3169 const char *
3170 mep_emit_bb_trace_ret (void)
3172 fprintf (asm_out_file, "\t# end of block profiling\n");
3173 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3174 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3175 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3176 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3177 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3178 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3179 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3180 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3181 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3182 return "";
3185 #undef SAVE
3186 #undef RESTORE
3188 /* Operand Printing. */
3190 void
3191 mep_print_operand_address (FILE *stream, rtx address)
3193 if (GET_CODE (address) == MEM)
3194 address = XEXP (address, 0);
3195 else
3196 /* cf: gcc.dg/asm-4.c. */
3197 gcc_assert (GET_CODE (address) == REG);
3199 mep_print_operand (stream, address, 0);
/* Operand-printing conversion table used by mep_print_operand.
   CODE is the %-letter an entry applies to (0 = no letter).  PATTERN
   is matched against the string built by encode_pattern() for the
   operand.  FORMAT is the output template: a digit N prints sub-rtx
   patternr[N], a backslash escapes the following literal character,
   and all other characters are copied through (a '+' before a
   negative constant is suppressed).  */
3202 static struct
3204 char code;
3205 const char *pattern;
3206 const char *format;
3208 const conversions[] =
3210 { 0, "r", "0" },
3211 { 0, "m+ri", "3(2)" },
3212 { 0, "mr", "(1)" },
3213 { 0, "ms", "(1)" },
3214 { 0, "ml", "(1)" },
3215 { 0, "mLrs", "%lo(3)(2)" },
3216 { 0, "mLr+si", "%lo(4+5)(2)" },
3217 { 0, "m+ru2s", "%tpoff(5)(2)" },
3218 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3219 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3220 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3221 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3222 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3223 { 0, "mi", "(1)" },
3224 { 0, "m+si", "(2+3)" },
3225 { 0, "m+li", "(2+3)" },
3226 { 0, "i", "0" },
3227 { 0, "s", "0" },
3228 { 0, "+si", "1+2" },
3229 { 0, "+u2si", "%tpoff(3+4)" },
3230 { 0, "+u3si", "%sdaoff(3+4)" },
3231 { 0, "l", "0" },
3232 { 'b', "i", "0" },
3233 { 'B', "i", "0" },
3234 { 'U', "i", "0" },
3235 { 'h', "i", "0" },
3236 { 'h', "Hs", "%hi(1)" },
3237 { 'I', "i", "0" },
3238 { 'I', "u2s", "%tpoff(2)" },
3239 { 'I', "u3s", "%sdaoff(2)" },
3240 { 'I', "+u2si", "%tpoff(3+4)" },
3241 { 'I', "+u3si", "%sdaoff(3+4)" },
3242 { 'J', "i", "0" },
3243 { 'P', "mr", "(1\\+),\\0" },
3244 { 'x', "i", "0" },
3245 { 0, 0, 0 }
3248 static int
3249 unique_bit_in (HOST_WIDE_INT i)
3251 switch (i & 0xff)
3253 case 0x01: case 0xfe: return 0;
3254 case 0x02: case 0xfd: return 1;
3255 case 0x04: case 0xfb: return 2;
3256 case 0x08: case 0xf7: return 3;
3257 case 0x10: case 0x7f: return 4;
3258 case 0x20: case 0xbf: return 5;
3259 case 0x40: case 0xdf: return 6;
3260 case 0x80: case 0xef: return 7;
3261 default:
3262 gcc_unreachable ();
3266 static int
3267 bit_size_for_clip (HOST_WIDE_INT i)
3269 int rv;
3271 for (rv = 0; rv < 31; rv ++)
3272 if (((HOST_WIDE_INT) 1 << rv) > i)
3273 return rv + 1;
3274 gcc_unreachable ();
3277 /* Print an operand to an assembler instruction. */
3279 void
3280 mep_print_operand (FILE *file, rtx x, int code)
3282 int i, j;
3283 const char *real_name;
/* %< prints the mnemonic of the intrinsic used for CR <- CR moves.  */
3285 if (code == '<')
3287 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3288 we're using, then skip over the "mep_" part of its name. */
3289 const struct cgen_insn *insn;
3291 if (mep_get_move_insn (mep_cmov, &insn))
3292 fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
3293 else
3294 mep_intrinsic_unavailable (mep_cmov);
3295 return;
/* %L maps the logical rtx code of X to a clr/set/not suffix.  */
3297 if (code == 'L')
3299 switch (GET_CODE (x))
3301 case AND:
3302 fputs ("clr", file);
3303 return;
3304 case IOR:
3305 fputs ("set", file);
3306 return;
3307 case XOR:
3308 fputs ("not", file);
3309 return;
3310 default:
3311 output_operand_lossage ("invalid %%L code");
3314 if (code == 'M')
3316 /* Print the second operand of a CR <- CR move. If we're using
3317 a two-operand instruction (i.e., a real cmov), then just print
3318 the operand normally. If we're using a "reg, reg, immediate"
3319 instruction such as caddi3, print the operand followed by a
3320 zero field. If we're using a three-register instruction,
3321 print the operand twice. */
3322 const struct cgen_insn *insn;
3324 mep_print_operand (file, x, 0);
3325 if (mep_get_move_insn (mep_cmov, &insn)
3326 && insn_data[insn->icode].n_operands == 3)
3328 fputs (", ", file);
3329 if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
3330 mep_print_operand (file, x, 0);
3331 else
3332 mep_print_operand (file, const0_rtx, 0);
3334 return;
/* All other codes are table-driven: encode X's shape into the global
   pattern/patternr buffers, find the matching `conversions' entry,
   and emit its format template (digits index sub-rtxes).  */
3337 encode_pattern (x);
3338 for (i = 0; conversions[i].pattern; i++)
3339 if (conversions[i].code == code
3340 && strcmp(conversions[i].pattern, pattern) == 0)
3342 for (j = 0; conversions[i].format[j]; j++)
/* Backslash escapes the next literal template character.  */
3343 if (conversions[i].format[j] == '\\')
3345 fputc (conversions[i].format[j+1], file);
3346 j++;
3348 else if (ISDIGIT(conversions[i].format[j]))
3350 rtx r = patternr[conversions[i].format[j] - '0'];
3351 switch (GET_CODE (r))
3353 case REG:
3354 fprintf (file, "%s", reg_names [REGNO (r)]);
3355 break;
3356 case CONST_INT:
3357 switch (code)
3359 case 'b':
3360 fprintf (file, "%d", unique_bit_in (INTVAL (r)));
3361 break;
3362 case 'B':
3363 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
3364 break;
3365 case 'h':
3366 fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
3367 break;
3368 case 'U':
3369 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
3370 break;
3371 case 'J':
3372 fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
3373 break;
3374 case 'x':
3375 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3376 && !(INTVAL (r) & 0xff))
3377 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
3378 else
3379 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3380 break;
3381 case 'I':
3382 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3383 && conversions[i].format[j+1] == 0)
3385 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
3386 fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
3388 else
3389 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3390 break;
3391 default:
3392 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3393 break;
3395 break;
3396 case CONST_DOUBLE:
3397 fprintf(file, "[const_double 0x%lx]",
3398 (unsigned long) CONST_DOUBLE_HIGH(r));
3399 break;
3400 case SYMBOL_REF:
3401 real_name = TARGET_STRIP_NAME_ENCODING (XSTR (r, 0));
3402 assemble_name (file, real_name);
3403 break;
3404 case LABEL_REF:
3405 output_asm_label (r);
3406 break;
3407 default:
3408 fprintf (stderr, "don't know how to print this operand:");
3409 debug_rtx (r);
3410 gcc_unreachable ();
3413 else
/* Suppress a template '+' before a negative constant so the output
   reads "base-4" rather than "base+-4".  */
3415 if (conversions[i].format[j] == '+'
3416 && (!code || code == 'I')
3417 && ISDIGIT (conversions[i].format[j+1])
3418 && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
3419 && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
3420 continue;
3421 fputc(conversions[i].format[j], file);
3423 break;
3425 if (!conversions[i].pattern)
3427 error ("unconvertible operand %c %qs", code?code:'-', pattern);
3428 debug_rtx(x);
3431 return;
/* Called by final before each insn is output.  */
3434 void
3435 mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
3436 int noperands ATTRIBUTE_UNUSED)
3438 /* Despite the fact that MeP is perfectly capable of branching and
3439 doing something else in the same bundle, gcc does jump
3440 optimization *after* scheduling, so we cannot trust the bundling
3441 flags on jump instructions. */
/* NOTE(review): BImode on the insn appears to mark VLIW-bundled
   insns and '+' the assembler's bundling prefix -- confirm against
   the MeP assembler syntax.  */
3442 if (GET_MODE (insn) == BImode
3443 && get_attr_slots (insn) != SLOTS_CORE)
3444 fputc ('+', asm_out_file);
3447 /* Function args in registers. */
3449 static void
3450 mep_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
3451 enum machine_mode mode ATTRIBUTE_UNUSED,
3452 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3453 int second_time ATTRIBUTE_UNUSED)
3455 int nsave = 4 - (cum->nregs + 1);
3457 if (nsave > 0)
3458 cfun->machine->arg_regs_to_save = nsave;
3459 *pretend_size = nsave * 4;
3462 static int
3463 bytesize (const_tree type, enum machine_mode mode)
3465 if (mode == BLKmode)
3466 return int_size_in_bytes (type);
3467 return GET_MODE_SIZE (mode);
/* Expand __builtin_saveregs: spill the unnamed argument registers to
   a stack buffer and return the buffer's address.  For IVC2, the
   matching coprocessor registers are stored after the core words.  */
3470 static rtx
3471 mep_expand_builtin_saveregs (void)
3473 int bufsize, i, ns;
3474 rtx regbuf;
3476 ns = cfun->machine->arg_regs_to_save;
3477 if (TARGET_IVC2)
/* Core words rounded up to 8 bytes, plus 8 bytes per coprocessor
   register, 64-bit aligned.  */
3479 bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
3480 regbuf = assign_stack_local (SImode, bufsize, 64);
3482 else
3484 bufsize = ns * 4;
3485 regbuf = assign_stack_local (SImode, bufsize, 32);
/* Store the ns unnamed core argument registers at the start.  */
3488 move_block_from_reg (5-ns, regbuf, ns);
3490 if (TARGET_IVC2)
3492 rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
3493 int ofs = 8 * ((ns+1)/2);
3495 for (i=0; i<ns; i++)
3497 int rn = (4-ns) + i + 49;
3498 rtx ptr;
3500 ptr = offset_address (tmp, GEN_INT (ofs), 2);
3501 emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
3502 ofs += 8;
3505 return XEXP (regbuf, 0);
/* True if tree type T is a vector type.  */
3508 #define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
3510 static tree
3511 mep_build_builtin_va_list (void)
3513 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3514 tree record;
3517 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3519 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3520 get_identifier ("__va_next_gp"), ptr_type_node);
3521 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3522 get_identifier ("__va_next_gp_limit"),
3523 ptr_type_node);
3524 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3525 ptr_type_node);
3526 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3527 ptr_type_node);
3529 DECL_FIELD_CONTEXT (f_next_gp) = record;
3530 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3531 DECL_FIELD_CONTEXT (f_next_cop) = record;
3532 DECL_FIELD_CONTEXT (f_next_stack) = record;
3534 TYPE_FIELDS (record) = f_next_gp;
3535 DECL_CHAIN (f_next_gp) = f_next_gp_limit;
3536 DECL_CHAIN (f_next_gp_limit) = f_next_cop;
3537 DECL_CHAIN (f_next_cop) = f_next_stack;
3539 layout_type (record);
3541 return record;
/* Implement va_start: fill in the four fields of the MeP va_list
   record (see mep_build_builtin_va_list) from the register-save
   buffer and the first stack argument NEXTARG.  */
3544 static void
3545 mep_expand_va_start (tree valist, rtx nextarg)
3547 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3548 tree next_gp, next_gp_limit, next_cop, next_stack;
3549 tree t, u;
3550 int ns;
3552 ns = cfun->machine->arg_regs_to_save;
3554 f_next_gp = TYPE_FIELDS (va_list_type_node);
3555 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3556 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3557 f_next_stack = DECL_CHAIN (f_next_cop);
3559 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3560 NULL_TREE);
3561 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3562 valist, f_next_gp_limit, NULL_TREE);
3563 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3564 NULL_TREE);
3565 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3566 valist, f_next_stack, NULL_TREE);
3568 /* va_list.next_gp = expand_builtin_saveregs (); */
3569 u = make_tree (sizetype, expand_builtin_saveregs ());
3570 u = fold_convert (ptr_type_node, u);
3571 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3572 TREE_SIDE_EFFECTS (t) = 1;
3573 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3575 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3576 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
3577 size_int (4 * ns));
3578 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3579 TREE_SIDE_EFFECTS (t) = 1;
3580 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* NOTE(review): this sets next_cop to next_gp_limit + 8*((ns+1)/2),
   i.e. buffer base + 4*ns + 8*((ns+1)/2), while
   mep_expand_builtin_saveregs stores the coprocessor slots at buffer
   base + 8*((ns+1)/2).  These look inconsistent -- confirm which
   layout is intended.  */
3582 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
3583 size_int (8 * ((ns+1)/2)));
3584 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3585 t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3586 TREE_SIDE_EFFECTS (t) = 1;
3587 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3589 /* va_list.next_stack = nextarg; */
3590 u = make_tree (ptr_type_node, nextarg);
3591 t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3592 TREE_SIDE_EFFECTS (t) = 1;
3593 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Emit gimple to fetch the next
   vararg of TYPE from the MeP multi-pointer va_list: core values come
   from next_gp, IVC2 vector values from next_cop, and overflow values
   from next_stack.  Values wider than 4 bytes (8 for IVC2 vectors), or
   of non-constant size, are passed by reference, so the fetched value
   is then a pointer which must be dereferenced.  */
static tree
mep_gimplify_va_arg_expr (tree valist, tree type,
                          gimple_seq *pre_p,
                          gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size, rsize;
  bool by_reference, ivc2_vec;
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree label_sover, label_selse;
  tree tmp, res_addr;

  ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);

  size = int_size_in_bytes (type);
  by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);

  if (by_reference)
    {
      /* A by-reference argument occupies one pointer-sized slot.  */
      type = build_pointer_type (type);
      size = 4;
    }
  /* Round up to a whole number of words for stack slot accounting.  */
  rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;

  /* The four fields of the va_list record, in declaration order.  */
  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
                    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
                          valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
                     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
                       valist, f_next_stack, NULL_TREE);

  /* Pseudo-code for the sequence built below:

     if f_next_gp < f_next_gp_limit
       IF (VECTOR_P && IVC2)
         val = *f_next_cop;
       ELSE
         val = *f_next_gp;
       f_next_gp += 4;
       f_next_cop += 8;
     else
       label_selse:
       val = *f_next_stack;
       f_next_stack += rsize;
     label_sover:
   */

  label_sover = create_artificial_label (UNKNOWN_LOCATION);
  label_selse = create_artificial_label (UNKNOWN_LOCATION);
  res_addr = create_tmp_var (ptr_type_node, NULL);

  /* Branch to the stack case once the register save area is used up.  */
  tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
                unshare_expr (next_gp_limit));
  tmp = build3 (COND_EXPR, void_type_node, tmp,
                build1 (GOTO_EXPR, void_type_node,
                        unshare_expr (label_selse)),
                NULL_TREE);
  gimplify_and_add (tmp, pre_p);

  if (ivc2_vec)
    {
      /* Vectors come out of the coprocessor save area.  */
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
      gimplify_and_add (tmp, pre_p);
    }
  else
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
      gimplify_and_add (tmp, pre_p);
    }

  /* Advance both pointers; the two save areas are consumed in
     lock-step regardless of which one supplied the value.  */
  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
                unshare_expr (next_gp), size_int (4));
  gimplify_assign (unshare_expr (next_gp), tmp, pre_p);

  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
                unshare_expr (next_cop), size_int (8));
  gimplify_assign (unshare_expr (next_cop), tmp, pre_p);

  tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
                unshare_expr (next_stack), size_int (rsize));
  gimplify_assign (unshare_expr (next_stack), tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  res_addr = fold_convert (build_pointer_type (type), res_addr);

  /* For by-reference values RES_ADDR holds the address of a pointer
     to the data; strip one extra level of indirection.  */
  if (by_reference)
    res_addr = build_va_arg_indirect_ref (res_addr);

  return build_va_arg_indirect_ref (res_addr);
}
3707 void
3708 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3709 rtx libname ATTRIBUTE_UNUSED,
3710 tree fndecl ATTRIBUTE_UNUSED)
3712 pcum->nregs = 0;
3714 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3715 pcum->vliw = 1;
3716 else
3717 pcum->vliw = 0;
/* Implement FUNCTION_ARG.  Arguments are passed in registers $1..$4;
   IVC2 vector arguments go in coprocessor registers 49..52.  Returns
   NULL_RTX once the argument registers are exhausted.  */
rtx
mep_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
		  tree type ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
{
  /* VOIDmode is a signal for the backend to pass data to the call
     expander via the second operand to the call pattern.  We use
     this to determine whether to use "jsr" or "jsrv".  */
  if (mode == VOIDmode)
    return GEN_INT (cum.vliw);

  /* If we haven't run out of argument registers, return the next.  */
  if (cum.nregs < 4)
    {
      if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
	return gen_rtx_REG (mode, cum.nregs + 49);
      else
	return gen_rtx_REG (mode, cum.nregs + 1);
    }

  /* Otherwise the argument goes on the stack.  */
  return NULL_RTX;
}
/* Implement TARGET_PASS_BY_REFERENCE.  Values of 1..4 bytes go by
   value; 5..8 byte values go by value only as IVC2 vectors while
   vector registers remain; everything else goes by reference.  */
static bool
mep_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
		       enum machine_mode mode,
		       const_tree type,
		       bool named ATTRIBUTE_UNUSED)
{
  int size = bytesize (type, mode);

  /* This is non-obvious, but yes, large values passed after we've run
     out of registers are *still* passed by reference - we put the
     address of the parameter on the stack, as well as putting the
     parameter itself elsewhere on the stack.  */

  if (size <= 0 || size > 8)
    return true;
  if (size <= 4)
    return false;
  if (TARGET_IVC2 && cum->nregs < 4 && type != NULL_TREE && VECTOR_TYPE_P (type))
    return false;
  return true;
}
3765 void
3766 mep_arg_advance (CUMULATIVE_ARGS *pcum,
3767 enum machine_mode mode ATTRIBUTE_UNUSED,
3768 tree type ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
3770 pcum->nregs += 1;
3773 bool
3774 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3776 int size = bytesize (type, BLKmode);
3777 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3778 return size > 0 && size <= 8 ? 0 : 1;
3779 return size > 0 && size <= 4 ? 0 : 1;
/* Implement TARGET_NARROW_VOLATILE_BITFIELD: always narrow volatile
   bit-field accesses to the declared field width.  The original body
   contained an unreachable "return false;" after the "return true;";
   that dead statement has been removed.  */
static bool
mep_narrow_volatile_bitfield (void)
{
  return true;
}
/* Implement FUNCTION_VALUE.  All values are returned in $0, except
   that IVC2 vector values are returned in coprocessor register 48.  */
rtx
mep_function_value (tree type, tree func ATTRIBUTE_UNUSED)
{
  if (TARGET_IVC2 && VECTOR_TYPE_P (type))
    return gen_rtx_REG (TYPE_MODE (type), 48);
  return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
}
/* Implement LIBCALL_VALUE, using the same rules as mep_function_value.
   Libcalls never return IVC2 vectors, so only the core case applies.  */
rtx
mep_libcall_value (enum machine_mode mode)
{
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
/* Handle pipeline hazards.  */

/* The opcodes that participate in hazards; indexes into OPNAMES.  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* Opcode (an op_num) of the most recently emitted instruction.  */
static int prev_opcode = 0;
/* This isn't as optimal as it could be, because we don't know what
   control register the STC opcode is storing in.  We only need to add
   the nop if it's the relevant register, but we add it for irrelevant
   registers also.  */
/* Called just before the opcode text at PTR is written to FILE; emit
   a "nop" first when the previous opcode and this one form an
   stc/fsft or stc/ret pipeline hazard.  PTR points at the start of
   the mnemonic; the ISGRAPH check ensures we match the whole word.  */
void
mep_asm_output_opcode (FILE *file, const char *ptr)
{
  int this_opcode = op_none;
  const char *hazard = 0;

  switch (*ptr)
    {
    case 'f':
      if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
	this_opcode = op_fsft;
      break;
    case 'r':
      if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
	this_opcode = op_ret;
      break;
    case 's':
      if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
	this_opcode = op_stc;
      break;
    }

  if (prev_opcode == op_stc && this_opcode == op_fsft)
    hazard = "nop";
  if (prev_opcode == op_stc && this_opcode == op_ret)
    hazard = "nop";

  if (hazard)
    fprintf(file, "%s\t# %s-%s hazard\n\t",
	    hazard, opnames[prev_opcode], opnames[this_opcode]);

  prev_opcode = this_opcode;
}
3853 /* Handle attributes. */
/* Attribute handler for "based" and "tiny": valid only on variables
   (or pointer/type decls), rejected on auto variables and on
   pointed-to types.  Sets *NO_ADD to discard the attribute.  */
static tree
mep_validate_based_tiny (tree *node, tree name, tree args,
			 int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      /* Address regions only make sense for statically allocated data.  */
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (0, "address region attributes not allowed with auto storage class");
	  *no_add = true;
	}
      /* Ignore storage attribute of pointed to variable: char __far * x;  */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
	{
	  warning (0, "address region attributes on pointed-to types ignored");
	  *no_add = true;
	}
    }
  return NULL_TREE;
}
3884 static int
3885 mep_multiple_address_regions (tree list, bool check_section_attr)
3887 tree a;
3888 int count_sections = 0;
3889 int section_attr_count = 0;
3891 for (a = list; a; a = TREE_CHAIN (a))
3893 if (is_attribute_p ("based", TREE_PURPOSE (a))
3894 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3895 || is_attribute_p ("near", TREE_PURPOSE (a))
3896 || is_attribute_p ("far", TREE_PURPOSE (a))
3897 || is_attribute_p ("io", TREE_PURPOSE (a)))
3898 count_sections ++;
3899 if (check_section_attr)
3900 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3903 if (check_section_attr)
3904 return section_attr_count;
3905 else
3906 return count_sections;
/* Fetch the attribute list for DECL: type attributes when DECL is a
   type, otherwise the decl's own attributes, falling back to the
   attributes of the decl's type.  */
#define MEP_ATTRIBUTES(decl) \
  (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
                : DECL_ATTRIBUTES (decl) \
                  ? (DECL_ATTRIBUTES (decl)) \
		  : TYPE_ATTRIBUTES (TREE_TYPE (decl))
/* Attribute handler for "near" and "far": like "based"/"tiny" but
   also valid on functions, and it additionally diagnoses duplicate
   address-region attributes on the same declaration.  */
static tree
mep_validate_near_far (tree *node, tree name, tree args,
		       int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables and functions",
	       name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      /* Address regions only make sense for statically allocated data.  */
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (0, "address region attributes not allowed with auto storage class");
	  *no_add = true;
	}
      /* Ignore storage attribute of pointed to variable: char __far * x;  */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
	{
	  warning (0, "address region attributes on pointed-to types ignored");
	  *no_add = true;
	}
    }
  else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
    {
      /* Conflicting regions: drop all of them rather than pick one.  */
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
      DECL_ATTRIBUTES (*node) = NULL_TREE;
    }
  return NULL_TREE;
}
3952 static tree
3953 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3954 int flags ATTRIBUTE_UNUSED, bool *no_add)
3956 if (TREE_CODE (*node) != FUNCTION_DECL
3957 && TREE_CODE (*node) != METHOD_TYPE)
3959 warning (0, "%qE attribute only applies to functions", name);
3960 *no_add = true;
3962 return NULL_TREE;
/* Attribute handler for "interrupt": valid only on functions that
   take no arguments and return void, and such functions may never
   be inlined.  */
static tree
mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
			int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  tree function_type;

  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (0, "%qE attribute only applies to functions", name);
      *no_add = true;
      return NULL_TREE;
    }

  /* Inlining an interrupt handler into ordinary code would be wrong,
     so reject "inline" and mark it uninlinable either way.  */
  if (DECL_DECLARED_INLINE_P (*node))
    error ("cannot inline interrupt function %qE", DECL_NAME (*node));
  DECL_UNINLINABLE (*node) = 1;

  function_type = TREE_TYPE (*node);

  if (TREE_TYPE (function_type) != void_type_node)
    error ("interrupt function must have return type of void");

  /* Accept only an empty prototype or exactly "(void)".  */
  if (TYPE_ARG_TYPES (function_type)
      && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
	  || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
    error ("interrupt function must have no arguments");

  return NULL_TREE;
}
/* Attribute handler for "io" and "cb": valid only on variables, with
   an optional integer-constant address argument.  Unless
   -mio-no-volatile is given, the variable is made volatile.  */
static tree
mep_validate_io_cb (tree *node, tree name, tree args,
		    int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }

  if (args != NULL_TREE)
    {
      /* Strip a NON_LVALUE_EXPR wrapper to get at the constant.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
	TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
	{
	  warning (0, "%qE attribute allows only an integer constant argument",
		   name);
	  *no_add = true;
	}
    }

  if (*no_add == false && !TARGET_IO_NO_VOLATILE)
    TREE_THIS_VOLATILE (*node) = 1;

  return NULL_TREE;
}
/* Attribute handler for "vliw": valid on function types/decls.  For
   the common misuses (pointers and arrays of VLIW function pointers)
   a one-time hint shows the correct typedef syntax.  */
static tree
mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
		   int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      static int gave_pointer_note = 0;
      static int gave_array_note = 0;
      static const char * given_type = NULL;

      given_type = tree_code_name[TREE_CODE (*node)];
      if (TREE_CODE (*node) == POINTER_TYPE)
	given_type = "pointers";
      if (TREE_CODE (*node) == ARRAY_TYPE)
	given_type = "arrays";

      if (given_type)
	warning (0, "%qE attribute only applies to functions, not %s",
		 name, given_type);
      else
	warning (0, "%qE attribute only applies to functions",
		 name);
      *no_add = true;

      /* Give the syntax hints at most once per compilation.  */
      if (TREE_CODE (*node) == POINTER_TYPE
	  && !gave_pointer_note)
	{
	  inform (input_location, "To describe a pointer to a VLIW function, use syntax like this:");
	  inform (input_location, " typedef int (__vliw *vfuncptr) ();");
	  gave_pointer_note = 1;
	}

      if (TREE_CODE (*node) == ARRAY_TYPE
	  && !gave_array_note)
	{
	  inform (input_location, "To describe an array of VLIW function pointers, use syntax like this:");
	  inform (input_location, " typedef int (__vliw *vfuncptr[]) ();");
	  gave_array_note = 1;
	}
    }
  if (!TARGET_VLIW)
    error ("VLIW functions are not allowed without a VLIW configuration");
  return NULL_TREE;
}
/* Machine attribute table; fields are name, min/max argument count,
   whether a decl/type/function-type is required, and the validation
   handler.  Terminated by a NULL-name entry.  */
static const struct attribute_spec mep_attribute_table[11] =
{
  /* name         min max decl   type   func   handler */
  { "based",        0, 0, false, false, false, mep_validate_based_tiny },
  { "tiny",         0, 0, false, false, false, mep_validate_based_tiny },
  { "near",         0, 0, false, false, false, mep_validate_near_far },
  { "far",          0, 0, false, false, false, mep_validate_near_far },
  { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt },
  { "interrupt",    0, 0, false, false, false, mep_validate_interrupt },
  { "io",           0, 1, false, false, false, mep_validate_io_cb },
  { "cb",           0, 1, false, false, false, mep_validate_io_cb },
  { "vliw",         0, 0, false, true,  false, mep_validate_vliw },
  { NULL,           0, 0, false, false, false, NULL }
};
4087 static bool
4088 mep_function_attribute_inlinable_p (const_tree callee)
4090 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4091 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4092 return (lookup_attribute ("disinterrupt", attrs) == 0
4093 && lookup_attribute ("interrupt", attrs) == 0);
4096 static bool
4097 mep_can_inline_p (tree caller, tree callee)
4099 if (TREE_CODE (callee) == ADDR_EXPR)
4100 callee = TREE_OPERAND (callee, 0);
4102 if (!mep_vliw_function_p (caller)
4103 && mep_vliw_function_p (callee))
4105 return false;
4107 return true;
/* Flag bits recorded per function name from pragmas.  */
#define FUNC_CALL 1
#define FUNC_DISINTERRUPT 2

/* One record per function name mentioned in a pragma.  FLAG holds the
   FUNC_* bits noted; USED records which bits were later looked up, so
   unused pragmas can be diagnosed at end of file.  */
struct GTY(()) pragma_entry {
  int used;
  int flag;
  const char *funcname;
};
typedef struct pragma_entry pragma_entry;

/* Hash table mapping function names to their pragma_entry.  */
static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4124 static int
4125 pragma_entry_eq (const void *p1, const void *p2)
4127 const pragma_entry *old = (const pragma_entry *) p1;
4128 const char *new_name = (const char *) p2;
4130 return strcmp (old->funcname, new_name) == 0;
4133 static hashval_t
4134 pragma_entry_hash (const void *p)
4136 const pragma_entry *old = (const pragma_entry *) p;
4137 return htab_hash_string (old->funcname);
/* Record FLAG for FUNCNAME in the pragma table, creating the table
   and/or the entry on first use.  */
static void
mep_note_pragma_flag (const char *funcname, int flag)
{
  pragma_entry **slot;

  if (!pragma_htab)
    pragma_htab = htab_create_ggc (31, pragma_entry_hash,
				   pragma_entry_eq, NULL);

  slot = (pragma_entry **)
    htab_find_slot_with_hash (pragma_htab, funcname,
			      htab_hash_string (funcname), INSERT);

  if (!*slot)
    {
      /* First mention of this function: allocate a fresh record.  */
      *slot = ggc_alloc_pragma_entry ();
      (*slot)->flag = 0;
      (*slot)->used = 0;
      (*slot)->funcname = ggc_strdup (funcname);
    }
  (*slot)->flag |= flag;
}
/* Return true if FLAG was noted for FUNCNAME; as a side effect mark
   the flag as used for the end-of-file "unused pragma" check.  */
static bool
mep_lookup_pragma_flag (const char *funcname, int flag)
{
  pragma_entry **slot;

  if (!pragma_htab)
    return false;

  /* Skip an "@x." section-encoding prefix on the symbol name.  */
  if (funcname[0] == '@' && funcname[2] == '.')
    funcname += 3;

  slot = (pragma_entry **)
    htab_find_slot_with_hash (pragma_htab, funcname,
			      htab_hash_string (funcname), NO_INSERT);
  if (slot && *slot && ((*slot)->flag & flag))
    {
      (*slot)->used |= flag;
      return true;
    }
  return false;
}
/* True if the FUNC_CALL flag was noted for FUNCNAME (marks it used).  */
bool
mep_lookup_pragma_call (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_CALL);
}
/* Record the FUNC_CALL flag for FUNCNAME.  */
void
mep_note_pragma_call (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_CALL);
}
/* True if FUNC_DISINTERRUPT was noted for FUNCNAME (marks it used).  */
bool
mep_lookup_pragma_disinterrupt (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
}
/* Record the FUNC_DISINTERRUPT flag for FUNCNAME.  */
void
mep_note_pragma_disinterrupt (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
}
/* htab_traverse callback: warn about any "#pragma disinterrupt" that
   was never matched by a lookup.  Returns 1 to keep traversing.  */
static int
note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const pragma_entry *d = (const pragma_entry *)(*slot);

  if ((d->flag & FUNC_DISINTERRUPT)
      && !(d->used & FUNC_DISINTERRUPT))
    warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
  return 1;
}
/* End-of-file hook: diagnose pragmas that were noted but never used.  */
void
mep_file_cleanups (void)
{
  if (pragma_htab)
    htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
}
/* These three functions provide a bridge between the pragmas that
   affect register classes, and the functions that maintain them.  We
   can't call those functions directly as pragma handling is part of
   the front end and doesn't have direct access to them.  */
/* Pragma bridge: forward to save_register_info.  */
void
mep_save_register_info (void)
{
  save_register_info ();
}
/* Pragma bridge: forward to reinit_regs.  */
void
mep_reinit_regs (void)
{
  reinit_regs ();
}
/* Pragma bridge: forward to init_regs.  */
void
mep_init_regs (void)
{
  init_regs ();
}
/* Map the address-region attribute in LIST (attributes of DECL) to a
   one-character encoding: 'b'ased, 't'iny, 'n'ear, 'f'ar, 'i'o (with
   an in-range address), 'I'o (no/out-of-range address), 'c'b.
   Returns 0 if no region attribute applies, except that with -mtf
   functions without an explicit section default to 'f'.  */
static int
mep_attrlist_to_encoding (tree list, tree decl)
{
  if (mep_multiple_address_regions (list, false) > 1)
    {
      /* Conflicting regions: warn and keep only the first one.  */
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       TREE_PURPOSE (TREE_CHAIN (list)),
	       DECL_NAME (decl),
	       DECL_SOURCE_LINE (decl));
      TREE_CHAIN (list) = NULL_TREE;
    }

  while (list)
    {
      if (is_attribute_p ("based", TREE_PURPOSE (list)))
	return 'b';
      if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
	return 't';
      if (is_attribute_p ("near", TREE_PURPOSE (list)))
	return 'n';
      if (is_attribute_p ("far", TREE_PURPOSE (list)))
	return 'f';
      if (is_attribute_p ("io", TREE_PURPOSE (list)))
	{
	  if (TREE_VALUE (list)
	      && TREE_VALUE (TREE_VALUE (list))
	      && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
	      if (location >= 0
		  && location <= 0x1000000)
		return 'i';
	    }
	  return 'I';
	}
      if (is_attribute_p ("cb", TREE_PURPOSE (list)))
	return 'c';
      list = TREE_CHAIN (list);
    }
  if (TARGET_TF
      && TREE_CODE (decl) == FUNCTION_DECL
      && DECL_SECTION_NAME (decl) == 0)
    return 'f';
  return 0;
}
4298 static int
4299 mep_comp_type_attributes (const_tree t1, const_tree t2)
4301 int vliw1, vliw2;
4303 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4304 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4306 if (vliw1 != vliw2)
4307 return 0;
4309 return 1;
/* Implement TARGET_INSERT_ATTRIBUTES.  Attach a "disinterrupt"
   attribute to functions named in a pragma, and give statically
   allocated variables a default address-region attribute ("based",
   "tiny" or "far") chosen by size cutoffs, -mrand-tpgp hashing, and
   the -mconst-section option — unless an explicit region/section
   attribute is already present.  */
static void
mep_insert_attributes (tree decl, tree *attributes)
{
  int size;
  const char *secname = 0;
  tree attrib, attrlist;
  char encoding;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));

      if (mep_lookup_pragma_disinterrupt (funcname))
	{
	  attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
	  *attributes = chainon (*attributes, attrib);
	}
    }

  /* Only statically allocated variables get a default region.  */
  if (TREE_CODE (decl) != VAR_DECL
      || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
    return;

  if (TREE_READONLY (decl) && TARGET_DC)
    /* -mdc means that const variables default to the near section,
       regardless of the size cutoff.  */
    return;

  /* User specified an attribute, so override the default.
     Ignore storage attribute of pointed to variable. char __far * x;  */
  if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
    {
      if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
	TYPE_ATTRIBUTES (decl) = NULL_TREE;
      else if (DECL_ATTRIBUTES (decl) && *attributes)
	DECL_ATTRIBUTES (decl) = NULL_TREE;
    }

  attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
  encoding = mep_attrlist_to_encoding (attrlist, decl);
  if (!encoding && TYPE_P (TREE_TYPE (decl)))
    {
      attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
      encoding = mep_attrlist_to_encoding (attrlist, decl);
    }
  if (encoding)
    {
      /* This means that the declaration has a specific section
	 attribute, so we should not apply the default rules.  */

      if (encoding == 'i' || encoding == 'I')
	{
	  tree attr = lookup_attribute ("io", attrlist);
	  if (attr
	      && TREE_VALUE (attr)
	      && TREE_VALUE (TREE_VALUE(attr)))
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
	      static tree previous_value = 0;
	      static int previous_location = 0;
	      static tree previous_name = 0;

	      /* We take advantage of the fact that gcc will reuse the
		 same tree pointer when applying an attribute to a
		 list of decls, but produce a new tree for attributes
		 on separate source lines, even when they're textually
		 identical.  This is the behavior we want.  */
	      if (TREE_VALUE (attr) == previous_value
		  && location == previous_location)
		{
		  warning(0, "__io address 0x%x is the same for %qE and %qE",
			  location, previous_name, DECL_NAME (decl));
		}
	      previous_name = DECL_NAME (decl);
	      previous_location = location;
	      previous_value = TREE_VALUE (attr);
	    }

      return;
    }

  /* Declarations of arrays can change size.  Don't trust them.  */
  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
    size = 0;
  else
    size = int_size_in_bytes (TREE_TYPE (decl));

  if (TARGET_RAND_TPGP && size <= 4 && size > 0)
    {
      /* -mrand-tpgp: scatter small variables pseudo-randomly across
	 regions by hashing their names.  */
      if (TREE_PUBLIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_STATIC (decl))
	{
	  const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
	  int key = 0;

	  while (*name)
	    key += *name++;

	  switch (key & 3)
	    {
	    case 0:
	      secname = "based";
	      break;
	    case 1:
	      secname = "tiny";
	      break;
	    case 2:
	      secname = "far";
	      break;
	    default:
	      ;
	    }
	}
    }
  else
    {
      if (size <= mep_based_cutoff && size > 0)
	secname = "based";
      else if (size <= mep_tiny_cutoff && size > 0)
	secname = "tiny";
      else if (TARGET_L)
	secname = "far";
    }

  if (mep_const_section && TREE_READONLY (decl))
    {
      if (strcmp (mep_const_section, "tiny") == 0)
	secname = "tiny";
      else if (strcmp (mep_const_section, "near") == 0)
	return;
      else if (strcmp (mep_const_section, "far") == 0)
	secname = "far";
    }

  if (!secname)
    return;

  if (!mep_multiple_address_regions (*attributes, true)
      && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
    {
      attrib = build_tree_list (get_identifier (secname), NULL_TREE);

      /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
	 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
	 and mep_validate_based_tiny.  */
      DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
    }
}
/* Implement TARGET_ENCODE_SECTION_INFO.  Prefix the assembler name of
   a region-attributed decl with "@<encoding>." so later passes can
   recover the region from the symbol name alone, and warn when a
   variable exceeds its region's addressable size.  */
static void
mep_encode_section_info (tree decl, rtx rtl, int first)
{
  rtx rtlname;
  const char *oldname;
  const char *secname;
  char encoding;
  char *newname;
  tree idp;
  int maxsize;
  tree type;
  tree mep_attributes;

  if (! first)
    return;

  if (TREE_CODE (decl) != VAR_DECL
      && TREE_CODE (decl) != FUNCTION_DECL)
    return;

  rtlname = XEXP (rtl, 0);
  if (GET_CODE (rtlname) == SYMBOL_REF)
    oldname = XSTR (rtlname, 0);
  else if (GET_CODE (rtlname) == MEM
	   && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
    oldname = XSTR (XEXP (rtlname, 0), 0);
  else
    gcc_unreachable ();

  type = TREE_TYPE (decl);
  if (type == error_mark_node)
    return;
  mep_attributes = MEP_ATTRIBUTES (decl);

  encoding = mep_attrlist_to_encoding (mep_attributes, decl);

  if (encoding)
    {
      /* Rebuild the SYMBOL_REF with the "@x." prefix, preserving the
	 weak flag and the decl link.  */
      newname = (char *) alloca (strlen (oldname) + 4);
      sprintf (newname, "@%c.%s", encoding, oldname);
      idp = get_identifier (newname);
      XEXP (rtl, 0) =
	gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
      SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
      SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);

      /* Addressable-size limits for the sized regions.  */
      switch (encoding)
	{
	case 'b':
	  maxsize = 128;
	  secname = "based";
	  break;
	case 't':
	  maxsize = 65536;
	  secname = "tiny";
	  break;
	case 'n':
	  maxsize = 0x1000000;
	  secname = "near";
	  break;
	default:
	  maxsize = 0;
	  secname = 0;
	  break;
	}
      if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
	{
	  warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
		   oldname,
		   (long) int_size_in_bytes (TREE_TYPE (decl)),
		   secname,
		   maxsize);
	}
    }
}
/* Implement TARGET_STRIP_NAME_ENCODING: return SYM with any leading
   '*' markers and "@x." region-encoding prefixes removed.  */
const char *
mep_strip_name_encoding (const char *sym)
{
  for (;;)
    {
      if (sym[0] == '*')
	{
	  sym += 1;
	  continue;
	}
      if (sym[0] == '@' && sym[2] == '.')
	{
	  sym += 3;
	  continue;
	}
      return sym;
    }
}
/* Implement TARGET_ASM_SELECT_SECTION, dispatching on the "@x."
   region encoding that mep_encode_section_info put in the symbol
   name, and on whether the object is read-only/initialized.  */
static section *
mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
		    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  int readonly = 1;
  int encoding;

  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (!TREE_READONLY (decl)
	  || TREE_SIDE_EFFECTS (decl)
	  || !DECL_INITIAL (decl)
	  || (DECL_INITIAL (decl) != error_mark_node
	      && !TREE_CONSTANT (DECL_INITIAL (decl))))
	readonly = 0;
      break;
    case CONSTRUCTOR:
      if (! TREE_CONSTANT (decl))
	readonly = 0;
      break;

    default:
      break;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	encoding = name[1];
      else
	encoding = 0;

      if (flag_function_sections || DECL_ONE_ONLY (decl))
	mep_unique_section (decl, 0);
      else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	{
	  /* VLIW code lives in its own sections.  */
	  if (encoding == 'f')
	    return vftext_section;
	  else
	    return vtext_section;
	}
      else if (encoding == 'f')
	return ftext_section;
      else
	return text_section;
    }

  if (TREE_CODE (decl) == VAR_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	switch (name[1])
	  {
	  case 'b':
	    return based_section;

	  case 't':
	    if (readonly)
	      return srodata_section;
	    if (DECL_INITIAL (decl))
	      return sdata_section;
	    return tinybss_section;

	  case 'f':
	    if (readonly)
	      return frodata_section;
	    return far_section;

	  case 'i':
	  case 'I':
	    /* io/cb variables are just addresses; data makes no sense.  */
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<io%> must be uninitialized", decl);
	    return data_section;

	  case 'c':
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<cb%> must be uninitialized", decl);
	    return data_section;
	  }
    }

  if (readonly)
    return readonly_data_section;

  return data_section;
}
/* Implement TARGET_ASM_UNIQUE_SECTION.  Build a per-decl section name
   of the form <prefix><symbol>, where the prefix is indexed by the
   decl's address region/kind and by whether it is link-once.  */
static void
mep_unique_section (tree decl, int reloc)
{
  static const char *prefixes[][2] =
  {
    { ".text.", ".gnu.linkonce.t." },
    { ".rodata.", ".gnu.linkonce.r." },
    { ".data.", ".gnu.linkonce.d." },
    { ".based.", ".gnu.linkonce.based." },
    { ".sdata.", ".gnu.linkonce.s." },
    { ".far.", ".gnu.linkonce.far." },
    { ".ftext.", ".gnu.linkonce.ft." },
    { ".frodata.", ".gnu.linkonce.frd." },
    { ".srodata.", ".gnu.linkonce.srd." },
    { ".vtext.", ".gnu.linkonce.v." },
    { ".vftext.", ".gnu.linkonce.vf." }
  };
  int sec = 2; /* .data */
  int len;
  const char *name, *prefix;
  char *string;

  /* Prefer the (possibly region-encoded) assembler name.  */
  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  if (DECL_RTL (decl))
    name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	sec = 9; /* .vtext */
      else
	sec = 0; /* .text */
    }
  else if (decl_readonly_section (decl, reloc))
    sec = 1; /* .rodata */

  /* Refine the choice from the "@x." region encoding, if any.  */
  if (name[0] == '@' && name[2] == '.')
    {
      switch (name[1])
	{
	case 'b':
	  sec = 3; /* .based */
	  break;
	case 't':
	  if (sec == 1)
	    sec = 8; /* .srodata */
	  else
	    sec = 4; /* .sdata */
	  break;
	case 'f':
	  if (sec == 0)
	    sec = 6; /* .ftext */
	  else if (sec == 9)
	    sec = 10; /* .vftext */
	  else if (sec == 1)
	    sec = 7; /* .frodata */
	  else
	    sec = 5; /* .far. */
	  break;
	}
      /* Drop the encoding prefix from the symbol name.  */
      name += 3;
    }

  prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
  len = strlen (name) + strlen (prefix);
  string = (char *) alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  DECL_SECTION_NAME (decl) = build_string (len, string);
}
/* Given a decl, a section name, and whether the decl initializer
   has relocs, choose attributes for the section.  */

/* Sections holding VLIW code are marked with a machine-dependent flag.  */
#define SECTION_MEP_VLIW  SECTION_MACH_DEP

static unsigned int
mep_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  if (decl && TREE_CODE (decl) == FUNCTION_DECL
      && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
    flags |= SECTION_MEP_VLIW;

  return flags;
}
/* Switch to an arbitrary section NAME with attributes as specified
   by FLAGS.  ALIGN specifies any known alignment requirements for
   the section; 0 if the default should be used.

   Differs from the standard ELF version only in support of VLIW mode.  */
static void
mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
{
  /* At most five flag characters plus the terminator.  */
  char flagchars[8], *f = flagchars;
  const char *type;

  if (!(flags & SECTION_DEBUG))
    *f++ = 'a';
  if (flags & SECTION_WRITE)
    *f++ = 'w';
  if (flags & SECTION_CODE)
    *f++ = 'x';
  if (flags & SECTION_SMALL)
    *f++ = 's';
  if (flags & SECTION_MEP_VLIW)
    *f++ = 'v';
  *f = '\0';

  if (flags & SECTION_BSS)
    type = "nobits";
  else
    type = "progbits";

  fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
	   name, flagchars, type);

  /* Tell the assembler which instruction set the section contains.  */
  if (flags & SECTION_CODE)
    fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
	   asm_out_file);
}
/* Emit a common (or, if !GLOBAL, local) definition of NAME with SIZE
   and ALIGN.  "@i."/"@I."/"@c." symbols are not allocated at all —
   they are simply equated to the address from their io/cb attribute.
   Other region-encoded symbols are laid out by hand in the matching
   bss-style section, since .comm would place them elsewhere.  */
void
mep_output_aligned_common (FILE *stream, tree decl, const char *name,
			   int size, int align, int global)
{
  /* We intentionally don't use mep_section_tag() here.  */
  if (name[0] == '@'
      && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
      && name[2] == '.')
    {
      int location = -1;
      tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
				    DECL_ATTRIBUTES (decl));
      if (attr
	  && TREE_VALUE (attr)
	  && TREE_VALUE (TREE_VALUE(attr)))
	location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
      if (location == -1)
	return;
      if (global)
	{
	  fprintf (stream, "\t.globl\t");
	  assemble_name (stream, name);
	  fprintf (stream, "\n");
	}
      /* Equate the symbol to its fixed address.  */
      assemble_name (stream, name);
      fprintf (stream, " = %d\n", location);
      return;
    }
  if (name[0] == '@' && name[2] == '.')
    {
      const char *sec = 0;
      switch (name[1])
	{
	case 'b':
	  switch_to_section (based_section);
	  sec = ".based";
	  break;
	case 't':
	  switch_to_section (tinybss_section);
	  sec = ".sbss";
	  break;
	case 'f':
	  switch_to_section (farbss_section);
	  sec = ".farbss";
	  break;
	}
      if (sec)
	{
	  const char *name2;
	  int p2align = 0;

	  /* Convert the bit alignment to a power of two of bytes.  */
	  while (align > BITS_PER_UNIT)
	    {
	      align /= 2;
	      p2align ++;
	    }
	  name2 = TARGET_STRIP_NAME_ENCODING (name);
	  if (global)
	    fprintf (stream, "\t.globl\t%s\n", name2);
	  fprintf (stream, "\t.p2align %d\n", p2align);
	  fprintf (stream, "\t.type\t%s,@object\n", name2);
	  fprintf (stream, "\t.size\t%s,%d\n", name2, size);
	  fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
	  return;
	}
    }

  /* Default: ordinary .comm / .local handling.  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
/* Trampolines.  */

/* Implement TARGET_TRAMPOLINE_INIT.  Instead of writing trampoline
   instructions inline, call a runtime helper that fills in M_TRAMP
   from the target function's address and the static chain.  */
static void
mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx addr = XEXP (m_tramp, 0);
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
		     LCT_NORMAL, VOIDmode, 3,
		     addr, Pmode,
		     fnaddr, Pmode,
		     static_chain, Pmode);
}
/* Experimental Reorg.  */

/* Return true if REG (a hard register, or NULL to look for any MEM)
   is mentioned anywhere inside rtx IN.  With MODES_TOO nonzero a
   register only matches if its mode matches too.  Recurses through
   the whole expression; a SET's destination address is searched but
   a matching SET source is deliberately skipped (see below).  */
static bool
mep_mentioned_p (rtx in,
		 rtx reg, /* NULL for mem */
		 int modes_too) /* if nonzero, modes must match also. */
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return false;
  if (reg && GET_CODE (reg) != REG)
    return false;

  if (GET_CODE (in) == LABEL_REF)
    return (reg == 0);

  code = GET_CODE (in);

  switch (code)
    {
    case MEM:
      /* When searching for a register, look inside the address;
	 when searching for any MEM, this is a hit.  */
      if (reg)
	return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
      return true;

    case REG:
      if (!reg)
	return false;
      if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
	return false;
      return (REGNO (in) == REGNO (reg));

    case SCRATCH:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
      /* Leaf codes that can never contain REG/MEM.  */
      return false;

    default:
      break;
    }

  /* Set's source should be read-only.  */
  if (code == SET && !reg)
    return mep_mentioned_p (SET_DEST (in), reg, modes_too);

  /* Generic recursion over the rtx format string.  */
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	    if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
	      return true;
	}
      else if (fmt[i] == 'e'
	       && mep_mentioned_p (XEXP (in, i), reg, modes_too))
	return true;
    }
  return false;
}
4931 #define EXPERIMENTAL_REGMOVE_REORG 1
4933 #if EXPERIMENTAL_REGMOVE_REORG
/* Return 1 when hard registers R1 and R2 live in the same register
   bank for regmove purposes -- both core (general) registers or both
   coprocessor registers -- and 0 otherwise.  */
static int
mep_compatible_reg_class (int r1, int r2)
{
  int both_core = GR_REGNO_P (r1) && GR_REGNO_P (r2);
  int both_copro = CR_REGNO_P (r1) && CR_REGNO_P (r2);

  return (both_core || both_copro) ? 1 : 0;
}
4945 static void
4946 mep_reorg_regmove (rtx insns)
4948 rtx insn, next, pat, follow, *where;
4949 int count = 0, done = 0, replace, before = 0;
4951 if (dump_file)
4952 for (insn = insns; insn; insn = NEXT_INSN (insn))
4953 if (GET_CODE (insn) == INSN)
4954 before++;
4956 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4957 set that uses the r2 and r2 dies there. We replace r2 with r1
4958 and see if it's still a valid insn. If so, delete the first set.
4959 Copied from reorg.c. */
4961 while (!done)
4963 done = 1;
4964 for (insn = insns; insn; insn = next)
4966 next = NEXT_INSN (insn);
4967 if (GET_CODE (insn) != INSN)
4968 continue;
4969 pat = PATTERN (insn);
4971 replace = 0;
4973 if (GET_CODE (pat) == SET
4974 && GET_CODE (SET_SRC (pat)) == REG
4975 && GET_CODE (SET_DEST (pat)) == REG
4976 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4977 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4979 follow = next_nonnote_insn (insn);
4980 if (dump_file)
4981 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
4983 while (follow && GET_CODE (follow) == INSN
4984 && GET_CODE (PATTERN (follow)) == SET
4985 && !dead_or_set_p (follow, SET_SRC (pat))
4986 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
4987 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
4989 if (dump_file)
4990 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
4991 follow = next_nonnote_insn (follow);
4994 if (dump_file)
4995 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
4996 if (follow && GET_CODE (follow) == INSN
4997 && GET_CODE (PATTERN (follow)) == SET
4998 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
5000 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
5002 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
5004 replace = 1;
5005 where = & SET_SRC (PATTERN (follow));
5008 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
5010 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
5012 replace = 1;
5013 where = & PATTERN (follow);
5019 /* If so, follow is the corresponding insn */
5020 if (replace)
5022 if (dump_file)
5024 rtx x;
5026 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
5027 for (x = insn; x ;x = NEXT_INSN (x))
5029 print_rtl_single (dump_file, x);
5030 if (x == follow)
5031 break;
5032 fprintf (dump_file, "\n");
5036 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
5037 follow, where))
5039 count ++;
5040 next = delete_insn (insn);
5041 if (dump_file)
5043 fprintf (dump_file, "\n----- Success! new insn:\n\n");
5044 print_rtl_single (dump_file, follow);
5046 done = 0;
5052 if (dump_file)
5054 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5055 fprintf (dump_file, "=====\n");
5058 #endif
/* Figure out where to put LABEL, which is the label for a repeat loop.
   If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
   the loop ends just before LAST_INSN.  If SHARED, insns other than the
   "repeat" might use LABEL to jump to the loop's continuation point.

   The MeP repeat instruction needs two trailing "epilogue" slots; this
   routine tries to move the loop's final two eligible insns after the
   label, padding with nops when fewer than two qualify.

   Return the last instruction in the adjusted loop.  */

static rtx
mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
			      bool shared)
{
  rtx next, prev;
  int count = 0, code, icode;

  if (dump_file)
    fprintf (dump_file, "considering end of repeat loop at insn %d\n",
	     INSN_UID (last_insn));

  /* Set PREV to the last insn in the loop.  */
  prev = last_insn;
  if (!including)
    prev = PREV_INSN (prev);

  /* Set NEXT to the next insn after the repeat label.  */
  next = last_insn;
  if (!shared)
    while (prev != 0)
      {
	code = GET_CODE (prev);
	/* Never move insns past a call, label, or barrier.  */
	if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
	  break;

	if (INSN_P (prev))
	  {
	    /* In a bundled SEQUENCE, consider its last member.  */
	    if (GET_CODE (PATTERN (prev)) == SEQUENCE)
	      prev = XVECEXP (PATTERN (prev), 0, 1);

	    /* Other insns that should not be in the last two opcodes.  */
	    icode = recog_memoized (prev);
	    if (icode < 0
		|| icode == CODE_FOR_repeat
		|| icode == CODE_FOR_erepeat
		|| get_attr_may_trap (prev) == MAY_TRAP_YES)
	      break;

	    /* That leaves JUMP_INSN and INSN.  It will have BImode if it
	       is the second instruction in a VLIW bundle.  In that case,
	       loop again: if the first instruction also satisfies the
	       conditions above then we will reach here again and put
	       both of them into the repeat epilogue.  Otherwise both
	       should remain outside.  */
	    if (GET_MODE (prev) != BImode)
	      {
		count++;
		next = prev;
		if (dump_file)
		  print_rtl_single (dump_file, next);
		if (count == 2)
		  break;
	      }
	  }
	prev = PREV_INSN (prev);
      }

  /* See if we're adding the label immediately after the repeat insn.
     If so, we need to separate them with a nop.  */
  prev = prev_real_insn (next);
  if (prev)
    switch (recog_memoized (prev))
      {
      case CODE_FOR_repeat:
      case CODE_FOR_erepeat:
	if (dump_file)
	  fprintf (dump_file, "Adding nop inside loop\n");
	emit_insn_before (gen_nop (), next);
	break;

      default:
	break;
      }

  /* Insert the label.  */
  emit_label_before (label, next);

  /* Insert the nops: the repeat epilogue must hold exactly two
     opcodes, so pad out whatever we could not move.  */
  if (dump_file && count < 2)
    fprintf (dump_file, "Adding %d nop%s\n\n",
	     2 - count, count == 1 ? "" : "s");
  for (; count < 2; count++)
    if (including)
      last_insn = emit_insn_after (gen_nop (), last_insn);
    else
      emit_insn_before (gen_nop (), last_insn);

  return last_insn;
}
/* Emit a doloop_begin (IS_END == 0) or doloop_end (IS_END != 0)
   placeholder insn for OPERANDS.  Each begin/end pair is linked by an
   integer "tag" operand; a new tag is allocated whenever none exists
   yet or the previous tag was started from the same kind of insn
   (i.e. we cannot be completing an open pair), so alternating
   begin/end calls share a tag.  mep_reorg_repeat() later matches
   insns by tag.  */
void
mep_emit_doloop (rtx *operands, int is_end)
{
  rtx tag;

  if (cfun->machine->doloop_tags == 0
      || cfun->machine->doloop_tag_from_end == is_end)
    {
      /* Allocate a fresh tag and remember which end created it.  */
      cfun->machine->doloop_tags++;
      cfun->machine->doloop_tag_from_end = is_end;
    }

  /* Tags are 0-based; use the most recently allocated one.  */
  tag = GEN_INT (cfun->machine->doloop_tags - 1);
  if (is_end)
    emit_jump_insn (gen_doloop_end_internal (operands[0], operands[4], tag));
  else
    emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
}
5180 /* Code for converting doloop_begins and doloop_ends into valid
5181 MeP instructions. A doloop_begin is just a placeholder:
5183 $count = unspec ($count)
5185 where $count is initially the number of iterations - 1.
5186 doloop_end has the form:
5188 if ($count-- == 0) goto label
5190 The counter variable is private to the doloop insns, nothing else
5191 relies on its value.
5193 There are three cases, in decreasing order of preference:
5195 1. A loop has exactly one doloop_begin and one doloop_end.
5196 The doloop_end branches to the first instruction after
5197 the doloop_begin.
5199 In this case we can replace the doloop_begin with a repeat
5200 instruction and remove the doloop_end. I.e.:
5202 $count1 = unspec ($count1)
5203 label:
5205 insn1
5206 insn2
5207 if ($count2-- == 0) goto label
5209 becomes:
5211 repeat $count1,repeat_label
5212 label:
5214 repeat_label:
5215 insn1
5216 insn2
5217 # end repeat
5219 2. As for (1), except there are several doloop_ends. One of them
5220 (call it X) falls through to a label L. All the others fall
5221 through to branches to L.
5223 In this case, we remove X and replace the other doloop_ends
5224 with branches to the repeat label. For example:
5226 $count1 = unspec ($count1)
5227 start:
5229 if ($count2-- == 0) goto label
5230 end:
5232 if ($count3-- == 0) goto label
5233 goto end
5235 becomes:
5237 repeat $count1,repeat_label
5238 start:
5240 repeat_label:
5243 # end repeat
5244 end:
5246 goto repeat_label
5248 3. The fallback case. Replace doloop_begins with:
5250 $count = $count + 1
5252 Replace doloop_ends with the equivalent of:
5254 $count = $count - 1
5255 if ($count == 0) goto label
5257 Note that this might need a scratch register if $count
5258 is stored in memory. */
/* A structure describing one doloop_begin.  */
struct mep_doloop_begin {
  /* The next doloop_begin with the same tag.  */
  struct mep_doloop_begin *next;

  /* The instruction itself.  */
  rtx insn;

  /* The initial counter value.  This is known to be a general register.  */
  rtx counter;
};

/* A structure describing a doloop_end.  */
struct mep_doloop_end {
  /* The next doloop_end with the same loop tag.  */
  struct mep_doloop_end *next;

  /* The instruction itself.  */
  rtx insn;

  /* The first instruction after INSN when the branch isn't taken.  */
  rtx fallthrough;

  /* The location of the counter value.  Since doloop_end_internal is a
     jump instruction, it has to allow the counter to be stored anywhere
     (any non-fixed register or memory location).  */
  rtx counter;

  /* The target label (the place where the insn branches when the counter
     isn't zero).  */
  rtx label;

  /* A scratch register.  Only available when COUNTER isn't stored
     in a general register.  */
  rtx scratch;
};


/* One do-while loop.  */
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
/* Return true if LOOP can be converted into repeat/repeat_end form
   (that is, if it matches cases (1) or (2) above).  */

static bool
mep_repeat_loop_p (struct mep_doloop *loop)
{
  struct mep_doloop_end *end;
  rtx fallthrough;

  /* There must be exactly one doloop_begin and at least one doloop_end.  */
  if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
    return false;

  /* The first doloop_end (X) must branch back to the insn after
     the doloop_begin.  */
  if (prev_real_insn (loop->end->label) != loop->begin->insn)
    return false;

  /* All the other doloop_ends must branch to the same place as X.
     When the branch isn't taken, they must jump to the instruction
     after X.  */
  fallthrough = loop->end->fallthrough;
  for (end = loop->end->next; end != 0; end = end->next)
    if (end->label != loop->end->label
	|| !simplejump_p (end->fallthrough)
	|| next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
      return false;

  return true;
}
/* The main repeat reorg function.  See comment above for details.
   Groups doloop_begin/doloop_end placeholders in INSNS by tag, then
   converts each loop either to a hardware repeat (cases 1/2) or to an
   explicit decrement-and-branch sequence (case 3).  */

static void
mep_reorg_repeat (rtx insns)
{
  rtx insn;
  struct mep_doloop *loops, *loop;
  struct mep_doloop_begin *begin;
  struct mep_doloop_end *end;

  /* Quick exit if we haven't created any loops.  */
  if (cfun->machine->doloop_tags == 0)
    return;

  /* Create an array of mep_doloop structures.  Everything is
     alloca'd: the records only need to live for this pass.  */
  loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
  memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

  /* Search the function for do-while insns and group them by loop tag.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      switch (recog_memoized (insn))
	{
	case CODE_FOR_doloop_begin_internal:
	  insn_extract (insn);
	  /* Operand 2 is the loop tag (see mep_emit_doloop).  */
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
	  begin->next = loop->begin;
	  begin->insn = insn;
	  begin->counter = recog_data.operand[0];

	  loop->begin = begin;
	  break;

	case CODE_FOR_doloop_end_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
	  end->insn = insn;
	  end->fallthrough = next_real_insn (insn);
	  end->counter = recog_data.operand[0];
	  end->label = recog_data.operand[1];
	  end->scratch = recog_data.operand[3];

	  /* If this insn falls through to an unconditional jump,
	     give it a lower priority than the others.  */
	  if (loop->end != 0 && simplejump_p (end->fallthrough))
	    {
	      end->next = loop->end->next;
	      loop->end->next = end;
	    }
	  else
	    {
	      end->next = loop->end;
	      loop->end = end;
	    }
	  break;
	}

  /* Convert the insns for each loop in turn.  */
  for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
    if (mep_repeat_loop_p (loop))
      {
	/* Case (1) or (2).  */
	rtx repeat_label, label_ref;

	/* Create a new label for the repeat insn.  */
	repeat_label = gen_label_rtx ();

	/* Replace the doloop_begin with a repeat.  */
	label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
	emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
			  loop->begin->insn);
	delete_insn (loop->begin->insn);

	/* Insert the repeat label before the first doloop_end.
	   Fill the gap with nops if there are other doloop_ends.  */
	mep_insert_repeat_label_last (loop->end->insn, repeat_label,
				      false, loop->end->next != 0);

	/* Emit a repeat_end (to improve the readability of the output).  */
	emit_insn_before (gen_repeat_end (), loop->end->insn);

	/* Delete the first doloop_end.  */
	delete_insn (loop->end->insn);

	/* Replace the others with branches to REPEAT_LABEL.  */
	for (end = loop->end->next; end != 0; end = end->next)
	  {
	    emit_jump_insn_before (gen_jump (repeat_label), end->insn);
	    delete_insn (end->insn);
	    delete_insn (end->fallthrough);
	  }
      }
    else
      {
	/* Case (3).  First replace all the doloop_begins with increment
	   instructions.  */
	for (begin = loop->begin; begin != 0; begin = begin->next)
	  {
	    emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
					     begin->counter, const1_rtx),
			      begin->insn);
	    delete_insn (begin->insn);
	  }

	/* Replace all the doloop_ends with decrement-and-branch sequences.  */
	for (end = loop->end; end != 0; end = end->next)
	  {
	    rtx reg;

	    start_sequence ();

	    /* Load the counter value into a general register ($0-$15);
	       fall back on the scratch operand otherwise.  */
	    reg = end->counter;
	    if (!REG_P (reg) || REGNO (reg) > 15)
	      {
		reg = end->scratch;
		emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
	      }

	    /* Decrement the counter.  */
	    emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
				      constm1_rtx));

	    /* Copy it back to its original location.  */
	    if (reg != end->counter)
	      emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

	    /* Jump back to the start label.  */
	    insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
						     end->label));
	    JUMP_LABEL (insn) = end->label;
	    LABEL_NUSES (end->label)++;

	    /* Emit the whole sequence before the doloop_end.  */
	    insn = get_insns ();
	    end_sequence ();
	    emit_insn_before (insn, end->insn);

	    /* Delete the doloop_end.  */
	    delete_insn (end->insn);
	  }
      }
}
/* Return true if conditional-branch INSN would still be recognized
   with its condition inverted.  Works by temporarily flipping the
   condition code in place, re-running recog, then restoring the
   original code before returning.  Only EQ/NE/LT/GE conditions are
   invertible here.  */
static bool
mep_invertable_branch_p (rtx insn)
{
  rtx cond, set;
  enum rtx_code old_code;
  int i;

  set = PATTERN (insn);
  if (GET_CODE (set) != SET)
    return false;
  if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
    return false;
  cond = XEXP (XEXP (set, 1), 0);
  old_code = GET_CODE (cond);
  /* Trial-invert the condition in place.  */
  switch (old_code)
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      return false;
    }
  /* Force re-recognition with the inverted condition...  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  /* ...then undo the mutation and invalidate the cached code again.  */
  PUT_CODE (cond, old_code);
  INSN_CODE (insn) = -1;
  return i >= 0;
}
/* Invert the condition of branch INSN (which must satisfy
   mep_invertable_branch_p) and retarget it at a fresh label emitted
   after AFTER.  The old target is deleted if INSN was its only
   user.  */
static void
mep_invert_branch (rtx insn, rtx after)
{
  rtx cond, set, label;
  int i;

  set = PATTERN (insn);

  gcc_assert (GET_CODE (set) == SET);
  gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

  /* Flip the condition code in place.  */
  cond = XEXP (XEXP (set, 1), 0);
  switch (GET_CODE (cond))
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      gcc_unreachable ();
    }
  label = gen_label_rtx ();
  emit_label_after (label, after);
  /* Redirect whichever arm of the IF_THEN_ELSE holds the label.  */
  for (i=1; i<=2; i++)
    if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
      {
	rtx ref = XEXP (XEXP (set, 1), i);
	if (LABEL_NUSES (XEXP (ref, 0)) == 1)
	  delete_insn (XEXP (ref, 0));
	XEXP (ref, 0) = label;
	LABEL_NUSES (label) ++;
	JUMP_LABEL (insn) = label;
      }
  /* The pattern changed; re-recognize and insist it still matches.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  gcc_assert (i >= 0);
}
/* Convert suitable backward branches in INSNS into erepeat loops.
   For each invertible conditional (or simple) jump, scan backwards
   for its target label; if the span contains no calls, barriers, or
   risky labels, wrap it with erepeat/erepeat_end.  */
static void
mep_reorg_erepeat (rtx insns)
{
  rtx insn, prev, l, x;
  int count;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	&& ! JUMP_TABLE_DATA_P (insn)
	&& mep_invertable_branch_p (insn))
      {
	if (dump_file)
	  {
	    fprintf (dump_file, "\n------------------------------\n");
	    fprintf (dump_file, "erepeat: considering this jump:\n");
	    print_rtl_single (dump_file, insn);
	  }
	/* A conditional branch itself counts as one loop insn.  */
	count = simplejump_p (insn) ? 0 : 1;
	/* Scan backwards for the loop-top label.  */
	for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
	  {
	    /* Calls and barriers end the search unsuccessfully.  */
	    if (GET_CODE (prev) == CALL_INSN
		|| BARRIER_P (prev))
	      break;

	    if (prev == JUMP_LABEL (insn))
	      {
		rtx newlast;
		if (dump_file)
		  fprintf (dump_file, "found loop top, %d insns\n", count);

		if (LABEL_NUSES (prev) == 1)
		  /* We're the only user, always safe */ ;
		else if (LABEL_NUSES (prev) == 2)
		  {
		    /* See if there's a barrier before this label.  If
		       so, we know nobody inside the loop uses it.
		       But we must be careful to put the erepeat
		       *after* the label.  */
		    rtx barrier;
		    for (barrier = PREV_INSN (prev);
			 barrier && GET_CODE (barrier) == NOTE;
			 barrier = PREV_INSN (barrier))
		      ;
		    if (barrier && GET_CODE (barrier) != BARRIER)
		      break;
		  }
		else
		  {
		    /* We don't know who else, within or without our loop, uses this */
		    if (dump_file)
		      fprintf (dump_file, "... but there are multiple users, too risky.\n");
		    break;
		  }

		/* Generate a label to be used by the erepeat insn.  */
		l = gen_label_rtx ();

		/* Insert the erepeat after INSN's target label.  */
		x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
		LABEL_NUSES (l)++;
		emit_insn_after (x, prev);

		/* Insert the erepeat label.  */
		newlast = (mep_insert_repeat_label_last
			   (insn, l, !simplejump_p (insn), false));
		if (simplejump_p (insn))
		  {
		    /* Unconditional jump: the erepeat replaces it.  */
		    emit_insn_before (gen_erepeat_end (), insn);
		    delete_insn (insn);
		  }
		else
		  {
		    /* Conditional jump: invert it so it exits the loop.  */
		    mep_invert_branch (insn, newlast);
		    emit_insn_after (gen_erepeat_end (), newlast);
		  }
		break;
	      }

	    if (LABEL_P (prev))
	      {
		/* A label is OK if there is exactly one user, and we
		   can find that user before the next label.  */
		rtx user = 0;
		int safe = 0;
		if (LABEL_NUSES (prev) == 1)
		  {
		    for (user = PREV_INSN (prev);
			 user && (INSN_P (user) || GET_CODE (user) == NOTE);
			 user = PREV_INSN (user))
		      if (GET_CODE (user) == JUMP_INSN
			  && JUMP_LABEL (user) == prev)
			{
			  safe = INSN_UID (user);
			  break;
			}
		  }
		if (!safe)
		  break;
		if (dump_file)
		  fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
			   safe, INSN_UID (prev));
	      }

	    if (INSN_P (prev))
	      {
		count ++;
	      }
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n==============================\n");
}
/* Replace a jump to a return, with a copy of the return.  GCC doesn't
   always do this on its own.  */

static void
mep_jmp_return_reorg (rtx insns)
{
  rtx insn, label, ret;
  int ret_code;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (simplejump_p (insn))
      {
	/* Find the first real insn the jump jumps to.  */
	label = ret = JUMP_LABEL (insn);
	while (ret
	       && (GET_CODE (ret) == NOTE
		   || GET_CODE (ret) == CODE_LABEL
		   || GET_CODE (PATTERN (ret)) == USE))
	  ret = NEXT_INSN (ret);

	if (ret)
	  {
	    /* Is it a return?  */
	    ret_code = recog_memoized (ret);
	    if (ret_code == CODE_FOR_return_internal
		|| ret_code == CODE_FOR_eh_return_internal)
	      {
		/* It is.  Replace the jump with a return.  The label
		   loses a user and is deleted when unreferenced.  */
		LABEL_NUSES (label) --;
		if (LABEL_NUSES (label) == 0)
		  delete_insn (label);
		PATTERN (insn) = copy_rtx (PATTERN (ret));
		INSN_CODE (insn) = -1;
	      }
	  }
      }
}
5726 static void
5727 mep_reorg_addcombine (rtx insns)
5729 rtx i, n;
5731 for (i = insns; i; i = NEXT_INSN (i))
5732 if (INSN_P (i)
5733 && INSN_CODE (i) == CODE_FOR_addsi3
5734 && GET_CODE (SET_DEST (PATTERN (i))) == REG
5735 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
5736 && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
5737 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
5739 n = NEXT_INSN (i);
5740 if (INSN_P (n)
5741 && INSN_CODE (n) == CODE_FOR_addsi3
5742 && GET_CODE (SET_DEST (PATTERN (n))) == REG
5743 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
5744 && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
5745 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
5747 int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
5748 int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
5749 if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
5750 && ic + nc < 32767
5751 && ic + nc > -32768)
5753 XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
5754 NEXT_INSN (i) = NEXT_INSN (n);
5755 if (NEXT_INSN (i))
5756 PREV_INSN (NEXT_INSN (i)) = i;
5762 /* If this insn adjusts the stack, return the adjustment, else return
5763 zero. */
5764 static int
5765 add_sp_insn_p (rtx insn)
5767 rtx pat;
5769 if (! single_set (insn))
5770 return 0;
5771 pat = PATTERN (insn);
5772 if (GET_CODE (SET_DEST (pat)) != REG)
5773 return 0;
5774 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5775 return 0;
5776 if (GET_CODE (SET_SRC (pat)) != PLUS)
5777 return 0;
5778 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5779 return 0;
5780 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5781 return 0;
5782 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5783 return 0;
5784 return INTVAL (XEXP (SET_SRC (pat), 1));
/* Check for trivial functions that set up an unneeded stack
   frame.  If the body between the $sp -= N prologue and $sp += N
   epilogue never mentions $sp and makes no calls, delete both
   adjustments.  */
static void
mep_reorg_noframe (rtx insns)
{
  rtx start_frame_insn;
  rtx end_frame_insn = 0;
  int sp_adjust, sp2;
  rtx sp;

  /* The first insn should be $sp = $sp + N */
  while (insns && ! INSN_P (insns))
    insns = NEXT_INSN (insns);
  if (!insns)
    return;

  sp_adjust = add_sp_insn_p (insns);
  if (sp_adjust == 0)
    return;

  start_frame_insn = insns;
  sp = SET_DEST (PATTERN (start_frame_insn));

  insns = next_real_insn (insns);

  while (insns)
    {
      rtx next = next_real_insn (insns);
      if (!next)
	break;

      sp2 = add_sp_insn_p (insns);
      if (sp2)
	{
	  /* A second adjustment must be the matching epilogue; more
	     than one, or a non-matching amount, disqualifies us.  */
	  if (end_frame_insn)
	    return;
	  end_frame_insn = insns;
	  if (sp2 != -sp_adjust)
	    return;
	}
      else if (mep_mentioned_p (insns, sp, 0))
	return;
      else if (CALL_P (insns))
	return;

      insns = next;
    }

  if (end_frame_insn)
    {
      delete_insn (start_frame_insn);
      delete_insn (end_frame_insn);
    }
}
/* Implement TARGET_MACHINE_DEPENDENT_REORG: run the MeP-specific
   late RTL cleanups in dependency order (see the comments on each
   step below).  */
static void
mep_reorg (void)
{
  rtx insns = get_insns ();

  /* We require accurate REG_DEAD notes.  */
  compute_bb_for_insn ();
  df_note_add_problem ();
  df_analyze ();

  mep_reorg_addcombine (insns);
#if EXPERIMENTAL_REGMOVE_REORG
  /* VLIW packing has been done already, so we can't just delete things.  */
  if (!mep_vliw_function_p (cfun->decl))
    mep_reorg_regmove (insns);
#endif
  mep_jmp_return_reorg (insns);
  mep_bundle_insns (insns);
  mep_reorg_repeat (insns);
  /* erepeat uses the RPB register, so it is only safe outside
     profiled code and in interrupt handlers that save it.  */
  if (optimize
      && !profile_flag
      && !profile_arc_flag
      && TARGET_OPT_REPEAT
      && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
    mep_reorg_erepeat (insns);

  /* This may delete *insns so make sure it's last.  */
  mep_reorg_noframe (insns);

  df_finish_pass (false);
}
5876 /*----------------------------------------------------------------------*/
5877 /* Builtins */
5878 /*----------------------------------------------------------------------*/
/* Element X gives the index into cgen_insns[] of the most general
   implementation of intrinsic X.  Unimplemented intrinsics are
   mapped to -1.  */
int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

/* Element X gives the index of another instruction that is mapped to
   the same intrinsic as cgen_insns[X].  It is -1 when there is no other
   instruction.

   Things are set up so that mep_intrinsic_chain[X] < X.  */
static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

/* The bitmask for the current ISA.  The ISA masks are declared
   in mep-intrin.h.  */
unsigned int mep_selected_isa;

/* One entry in the -mconfig= selection table: a configuration name
   and the ISA bitmask it selects.  */
struct mep_config {
  const char *config_name;
  unsigned int isa;
};

/* The known configurations, terminated by a null-named sentinel.
   COPROC_SELECTION_TABLE is generated elsewhere, when present.  */
static struct mep_config mep_configs[] = {
#ifdef COPROC_SELECTION_TABLE
  COPROC_SELECTION_TABLE,
#endif
  { 0, 0 }
};
/* Initialize the global intrinsics variables above.  */

static void
mep_init_intrinsics (void)
{
  size_t i;

  /* Set MEP_SELECTED_ISA to the ISA flag for this configuration.
     The first table entry is the default; -mconfig= overrides it.  */
  mep_selected_isa = mep_configs[0].isa;
  if (mep_config_string != 0)
    for (i = 0; mep_configs[i].config_name; i++)
      if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
	{
	  mep_selected_isa = mep_configs[i].isa;
	  break;
	}

  /* Assume all intrinsics are unavailable.  */
  for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
    mep_intrinsic_insn[i] = -1;

  /* Build up the global intrinsic tables.  Later cgen_insns entries
     push earlier ones onto the chain, so mep_intrinsic_chain[X] < X.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].isas & mep_selected_isa) != 0)
      {
	mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
	mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
      }

  /* See whether we can directly move values between one coprocessor
     register and another.  */
  for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
    if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
      mep_have_copro_copro_moves_p = true;

  /* See whether we can directly move values between core and
     coprocessor registers.  */
  mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
				 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));

  /* NOTE(review): this unconditionally overrides the value just
     computed, making the cmov1/cmov2 test above dead code.  It may be
     a deliberate "force on" override, but that is not evident here --
     confirm whether the override is still intended.  */
  mep_have_core_copro_moves_p = 1;
}
/* Declare all available intrinsic functions.  Called once only.  */

/* Cached tree type nodes for the coprocessor builtin types.  All are
   created in mep_init_builtins and consumed by
   mep_cgen_regnum_to_type.  */
static tree cp_data_bus_int_type_node;
static tree opaque_vector_type_node;
static tree v8qi_type_node;
static tree v4hi_type_node;
static tree v2si_type_node;
static tree v8uqi_type_node;
static tree v4uhi_type_node;
static tree v2usi_type_node;
5961 static tree
5962 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
5964 switch (cr)
5966 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
5967 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
5968 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
5969 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
5970 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
5971 case cgen_regnum_operand_type_CHAR: return char_type_node;
5972 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
5973 case cgen_regnum_operand_type_SI: return intSI_type_node;
5974 case cgen_regnum_operand_type_DI: return intDI_type_node;
5975 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
5976 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
5977 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
5978 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
5979 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
5980 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
5981 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
5982 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
5983 default:
5984 return void_type_node;
5988 static void
5989 mep_init_builtins (void)
5991 size_t i;
5993 if (TARGET_64BIT_CR_REGS)
5994 cp_data_bus_int_type_node = long_long_integer_type_node;
5995 else
5996 cp_data_bus_int_type_node = long_integer_type_node;
5998 opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
5999 v8qi_type_node = build_vector_type (intQI_type_node, 8);
6000 v4hi_type_node = build_vector_type (intHI_type_node, 4);
6001 v2si_type_node = build_vector_type (intSI_type_node, 2);
6002 v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
6003 v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
6004 v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);
6006 (*lang_hooks.decls.pushdecl)
6007 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_data_bus_int"),
6008 cp_data_bus_int_type_node));
6010 (*lang_hooks.decls.pushdecl)
6011 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_vector"),
6012 opaque_vector_type_node));
6014 (*lang_hooks.decls.pushdecl)
6015 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8qi"),
6016 v8qi_type_node));
6017 (*lang_hooks.decls.pushdecl)
6018 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4hi"),
6019 v4hi_type_node));
6020 (*lang_hooks.decls.pushdecl)
6021 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2si"),
6022 v2si_type_node));
6024 (*lang_hooks.decls.pushdecl)
6025 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8uqi"),
6026 v8uqi_type_node));
6027 (*lang_hooks.decls.pushdecl)
6028 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4uhi"),
6029 v4uhi_type_node));
6030 (*lang_hooks.decls.pushdecl)
6031 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2usi"),
6032 v2usi_type_node));
6034 /* Intrinsics like mep_cadd3 are implemented with two groups of
6035 instructions, one which uses UNSPECs and one which uses a specific
6036 rtl code such as PLUS. Instructions in the latter group belong
6037 to GROUP_KNOWN_CODE.
6039 In such cases, the intrinsic will have two entries in the global
6040 tables above. The unspec form is accessed using builtin functions
6041 while the specific form is accessed using the mep_* enum in
6042 mep-intrin.h.
6044 The idea is that __cop arithmetic and builtin functions have
6045 different optimization requirements. If mep_cadd3() appears in
6046 the source code, the user will surely expect gcc to use cadd3
6047 rather than a work-alike such as add3. However, if the user
6048 just writes "a + b", where a or b are __cop variables, it is
6049 reasonable for gcc to choose a core instruction rather than
6050 cadd3 if it believes that is more optimal. */
6051 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
6052 if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
6053 && mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
6055 tree ret_type = void_type_node;
6056 tree bi_type;
6058 if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
6059 continue;
6061 if (cgen_insns[i].cret_p)
6062 ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);
6064 bi_type = build_function_type (ret_type, 0);
6065 add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
6066 bi_type,
6067 cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
6071 /* Report the unavailablity of the given intrinsic. */
6073 #if 1
6074 static void
6075 mep_intrinsic_unavailable (int intrinsic)
6077 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6079 if (already_reported_p[intrinsic])
6080 return;
6082 if (mep_intrinsic_insn[intrinsic] < 0)
6083 error ("coprocessor intrinsic %qs is not available in this configuration",
6084 cgen_intrinsics[intrinsic]);
6085 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6086 error ("%qs is not available in VLIW functions",
6087 cgen_intrinsics[intrinsic]);
6088 else
6089 error ("%qs is not available in non-VLIW functions",
6090 cgen_intrinsics[intrinsic]);
6092 already_reported_p[intrinsic] = 1;
6094 #endif
6097 /* See if any implementation of INTRINSIC is available to the
6098 current function. If so, store the most general implementation
6099 in *INSN_PTR and return true. Return false otherwise. */
6101 static bool
6102 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6104 int i;
6106 i = mep_intrinsic_insn[intrinsic];
6107 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6108 i = mep_intrinsic_chain[i];
6110 if (i >= 0)
6112 *insn_ptr = &cgen_insns[i];
6113 return true;
6115 return false;
6119 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6120 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6121 try using a work-alike instead. In this case, the returned insn
6122 may have three operands rather than two. */
6124 static bool
6125 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6127 size_t i;
6129 if (intrinsic == mep_cmov)
6131 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6132 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6133 return true;
6134 return false;
6136 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6140 /* If ARG is a register operand that is the same size as MODE, convert it
6141 to MODE using a subreg. Otherwise return ARG as-is. */
6143 static rtx
6144 mep_convert_arg (enum machine_mode mode, rtx arg)
6146 if (GET_MODE (arg) != mode
6147 && register_operand (arg, VOIDmode)
6148 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6149 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6150 return arg;
6154 /* Apply regnum conversions to ARG using the description given by REGNUM.
6155 Return the new argument on success and null on failure. */
6157 static rtx
6158 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6160 if (regnum->count == 0)
6161 return arg;
6163 if (GET_CODE (arg) != CONST_INT
6164 || INTVAL (arg) < 0
6165 || INTVAL (arg) >= regnum->count)
6166 return 0;
6168 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6172 /* Try to make intrinsic argument ARG match the given operand.
6173 UNSIGNED_P is true if the argument has an unsigned type. */
6175 static rtx
6176 mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
6177 int unsigned_p)
6179 if (GET_CODE (arg) == CONST_INT)
6181 /* CONST_INTs can only be bound to integer operands. */
6182 if (GET_MODE_CLASS (operand->mode) != MODE_INT)
6183 return 0;
6185 else if (GET_CODE (arg) == CONST_DOUBLE)
6186 /* These hold vector constants. */;
6187 else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
6189 /* If the argument is a different size from what's expected, we must
6190 have a value in the right mode class in order to convert it. */
6191 if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
6192 return 0;
6194 /* If the operand is an rvalue, promote or demote it to match the
6195 operand's size. This might not need extra instructions when
6196 ARG is a register value. */
6197 if (operand->constraint[0] != '=')
6198 arg = convert_to_mode (operand->mode, arg, unsigned_p);
6201 /* If the operand is an lvalue, bind the operand to a new register.
6202 The caller will copy this value into ARG after the main
6203 instruction. By doing this always, we produce slightly more
6204 optimal code. */
6205 /* But not for control registers. */
6206 if (operand->constraint[0] == '='
6207 && (! REG_P (arg)
6208 || ! (CONTROL_REGNO_P (REGNO (arg))
6209 || CCR_REGNO_P (REGNO (arg))
6210 || CR_REGNO_P (REGNO (arg)))
6212 return gen_reg_rtx (operand->mode);
6214 /* Try simple mode punning. */
6215 arg = mep_convert_arg (operand->mode, arg);
6216 if (operand->predicate (arg, operand->mode))
6217 return arg;
6219 /* See if forcing the argument into a register will make it match. */
6220 if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
6221 arg = force_reg (operand->mode, arg);
6222 else
6223 arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
6224 if (operand->predicate (arg, operand->mode))
6225 return arg;
6227 return 0;
6231 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6232 function FNNAME. OPERAND describes the operand to which ARGNUM
6233 is mapped. */
6235 static void
6236 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6237 int argnum, tree fnname)
6239 size_t i;
6241 if (GET_CODE (arg) == CONST_INT)
6242 for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6243 if (operand->predicate == cgen_immediate_predicates[i].predicate)
6245 const struct cgen_immediate_predicate *predicate;
6246 HOST_WIDE_INT argval;
6248 predicate = &cgen_immediate_predicates[i];
6249 argval = INTVAL (arg);
6250 if (argval < predicate->lower || argval >= predicate->upper)
6251 error ("argument %d of %qE must be in the range %d...%d",
6252 argnum, fnname, predicate->lower, predicate->upper - 1);
6253 else
6254 error ("argument %d of %qE must be a multiple of %d",
6255 argnum, fnname, predicate->align);
6256 return;
6259 error ("incompatible type for argument %d of %qE", argnum, fnname);
/* Expand a call EXP to a MeP intrinsic builtin into RTL.  Returns
   TARGET (which may be NULL) on success, or NULL_RTX after issuing an
   error when the intrinsic is unavailable, the argument count is wrong,
   or an argument cannot be legitimized.  SUBTARGET, MODE and IGNORE
   are unused.  */
6262 static rtx
6263 mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6264 rtx subtarget ATTRIBUTE_UNUSED,
6265 enum machine_mode mode ATTRIBUTE_UNUSED,
6266 int ignore ATTRIBUTE_UNUSED)
6268 rtx pat, op[10], arg[10];
6269 unsigned int a;
6270 int opindex, unsigned_p[10];
6271 tree fndecl, args;
6272 unsigned int n_args;
6273 tree fnname;
6274 const struct cgen_insn *cgen_insn;
6275 const struct insn_data_d *idata;
6276 unsigned int first_arg = 0;
6277 unsigned int builtin_n_args;
6279 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6280 fnname = DECL_NAME (fndecl);
6282 /* Find out which instruction we should emit. Note that some coprocessor
6283 intrinsics may only be available in VLIW mode, or only in normal mode. */
6284 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6286 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
6287 return NULL_RTX;
6289 idata = &insn_data[cgen_insn->icode];
6291 builtin_n_args = cgen_insn->num_args;
/* When the insn produces a coprocessor return value (cret_p), operand 0
   holds that value rather than a user-supplied argument, so the visible
   argument count is reduced by one.  NOTE(review): cret_p > 1 appears to
   mean the value is also passed explicitly (count net unchanged) --
   confirm against the cgen tables.  */
6293 if (cgen_insn->cret_p)
6295 if (cgen_insn->cret_p > 1)
6296 builtin_n_args ++;
6297 first_arg = 1;
6298 mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
6299 builtin_n_args --;
6302 /* Evaluate each argument. */
6303 n_args = call_expr_nargs (exp);
6305 if (n_args < builtin_n_args)
6307 error ("too few arguments to %qE", fnname);
6308 return NULL_RTX;
6310 if (n_args > builtin_n_args)
6312 error ("too many arguments to %qE", fnname);
6313 return NULL_RTX;
/* Expand each user argument into arg[] and record its signedness,
   undoing integer and float argument promotions first.  */
6316 for (a = first_arg; a < builtin_n_args + first_arg; a++)
6318 tree value;
6320 args = CALL_EXPR_ARG (exp, a - first_arg);
6322 value = args;
6324 #if 0
6325 if (cgen_insn->regnums[a].reference_p)
6327 if (TREE_CODE (value) != ADDR_EXPR)
6329 debug_tree(value);
6330 error ("argument %d of %qE must be an address", a+1, fnname);
6331 return NULL_RTX;
6333 value = TREE_OPERAND (value, 0);
6335 #endif
6337 /* If the argument has been promoted to int, get the unpromoted
6338 value. This is necessary when sub-int memory values are bound
6339 to reference parameters. */
6340 if (TREE_CODE (value) == NOP_EXPR
6341 && TREE_TYPE (value) == integer_type_node
6342 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6343 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6344 < TYPE_PRECISION (TREE_TYPE (value))))
6345 value = TREE_OPERAND (value, 0);
6347 /* If the argument has been promoted to double, get the unpromoted
6348 SFmode value. This is necessary for FMAX support, for example. */
6349 if (TREE_CODE (value) == NOP_EXPR
6350 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6351 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6352 && TYPE_MODE (TREE_TYPE (value)) == DFmode
6353 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6354 value = TREE_OPERAND (value, 0);
6356 unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6357 arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
6358 arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6359 if (cgen_insn->regnums[a].reference_p)
6361 tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6362 enum machine_mode pointed_mode = TYPE_MODE (pointed_to);
6364 arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
/* mep_convert_regnum returns 0 for an out-of-range register index.  */
6366 if (arg[a] == 0)
6368 error ("argument %d of %qE must be in the range %d...%d",
6369 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
6370 return NULL_RTX;
/* Bind the return-value operand(s) to TARGET when its mode matches,
   otherwise to a fresh pseudo.  */
6374 for (a = 0; a < first_arg; a++)
6376 if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6377 arg[a] = target;
6378 else
6379 arg[a] = gen_reg_rtx (idata->operand[0].mode);
6382 /* Convert the arguments into a form suitable for the intrinsic.
6383 Report an error if this isn't possible. */
6384 for (opindex = 0; opindex < idata->n_operands; opindex++)
6386 a = cgen_insn->op_mapping[opindex];
6387 op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6388 arg[a], unsigned_p[a]);
6389 if (op[opindex] == 0)
6391 mep_incompatible_arg (&idata->operand[opindex],
6392 arg[a], a + 1 - first_arg, fnname);
6393 return NULL_RTX;
6397 /* Emit the instruction. */
6398 pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6399 op[5], op[6], op[7], op[8], op[9]);
/* A (set (pc) (if_then_else ...)) pattern must be emitted as a jump.  */
6401 if (GET_CODE (pat) == SET
6402 && GET_CODE (SET_DEST (pat)) == PC
6403 && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6404 emit_jump_insn (pat);
6405 else
6406 emit_insn (pat);
6408 /* Copy lvalues back to their final locations. */
6409 for (opindex = 0; opindex < idata->n_operands; opindex++)
6410 if (idata->operand[opindex].constraint[0] == '=')
6412 a = cgen_insn->op_mapping[opindex];
6413 if (a >= first_arg)
6415 if (GET_MODE_CLASS (GET_MODE (arg[a]))
6416 != GET_MODE_CLASS (GET_MODE (op[opindex])))
6417 emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6418 op[opindex]));
6419 else
6421 /* First convert the operand to the right mode, then copy it
6422 into the destination. Doing the conversion as a separate
6423 step (rather than using convert_move) means that we can
6424 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6425 refer to the same register. */
6426 op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6427 op[opindex], unsigned_p[a]);
6428 if (!rtx_equal_p (arg[a], op[opindex]))
6429 emit_move_insn (arg[a], op[opindex]);
/* If a return value was produced somewhere other than TARGET, copy it.  */
6434 if (first_arg > 0 && target && target != op[0])
6436 emit_move_insn (target, op[0]);
6439 return target;
6442 static bool
6443 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
6445 return false;
6448 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6449 a global register. */
6451 static int
6452 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
6454 int regno;
6455 rtx x = *loc;
6457 if (! x)
6458 return 0;
6460 switch (GET_CODE (x))
6462 case SUBREG:
6463 if (REG_P (SUBREG_REG (x)))
6465 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6466 && global_regs[subreg_regno (x)])
6467 return 1;
6468 return 0;
6470 break;
6472 case REG:
6473 regno = REGNO (x);
6474 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6475 return 1;
6476 return 0;
6478 case SCRATCH:
6479 case PC:
6480 case CC0:
6481 case CONST_INT:
6482 case CONST_DOUBLE:
6483 case CONST:
6484 case LABEL_REF:
6485 return 0;
6487 case CALL:
6488 /* A non-constant call might use a global register. */
6489 return 1;
6491 default:
6492 break;
6495 return 0;
6498 /* Returns nonzero if X mentions a global register. */
6500 static int
6501 global_reg_mentioned_p (rtx x)
6503 if (INSN_P (x))
6505 if (CALL_P (x))
6507 if (! RTL_CONST_OR_PURE_CALL_P (x))
6508 return 1;
6509 x = CALL_INSN_FUNCTION_USAGE (x);
6510 if (x == 0)
6511 return 0;
6513 else
6514 x = PATTERN (x);
6517 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6519 /* Scheduling hooks for VLIW mode.
6521 Conceptually this is very simple: we have a two-pack architecture
6522 that takes one core insn and one coprocessor insn to make up either
6523 a 32- or 64-bit instruction word (depending on the option bit set in
6524 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6525 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6526 and one 48-bit cop insn or two 32-bit core/cop insns.
6528 In practice, instruction selection will be a bear. Consider in
6529 VL64 mode the following insns
6531 add $1, 1
6532 cmov $cr0, $0
6534 these cannot pack, since the add is a 16-bit core insn and cmov
6535 is a 32-bit cop insn. However,
6537 add3 $1, $1, 1
6538 cmov $cr0, $0
6540 packs just fine. For good VLIW code generation in VL64 mode, we
6541 will have to have 32-bit alternatives for many of the common core
6542 insns. Not implemented. */
6544 static int
6545 mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
6547 int cost_specified;
6549 if (REG_NOTE_KIND (link) != 0)
6551 /* See whether INSN and DEP_INSN are intrinsics that set the same
6552 hard register. If so, it is more important to free up DEP_INSN
6553 than it is to free up INSN.
6555 Note that intrinsics like mep_mulr are handled differently from
6556 the equivalent mep.md patterns. In mep.md, if we don't care
6557 about the value of $lo and $hi, the pattern will just clobber
6558 the registers, not set them. Since clobbers don't count as
6559 output dependencies, it is often possible to reorder two mulrs,
6560 even after reload.
6562 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6563 so any pair of mep_mulr()s will be inter-dependent. We should
6564 therefore give the first mep_mulr() a higher priority. */
6565 if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
6566 && global_reg_mentioned_p (PATTERN (insn))
6567 && global_reg_mentioned_p (PATTERN (dep_insn)))
6568 return 1;
6570 /* If the dependence is an anti or output dependence, assume it
6571 has no cost. */
6572 return 0;
6575 /* If we can't recognize the insns, we can't really do anything. */
6576 if (recog_memoized (dep_insn) < 0)
6577 return cost;
6579 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6580 attribute instead. */
6581 if (!TARGET_H1)
6583 cost_specified = get_attr_latency (dep_insn);
6584 if (cost_specified != 0)
6585 return cost_specified;
6588 return cost;
6591 /* ??? We don't properly compute the length of a load/store insn,
6592 taking into account the addressing mode. */
6594 static int
6595 mep_issue_rate (void)
6597 return TARGET_IVC2 ? 3 : 2;
6600 /* Return true if function DECL was declared with the vliw attribute. */
6602 bool
6603 mep_vliw_function_p (tree decl)
6605 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6608 static rtx
6609 mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6611 int i;
6613 for (i = nready - 1; i >= 0; --i)
6615 rtx insn = ready[i];
6616 if (recog_memoized (insn) >= 0
6617 && get_attr_slot (insn) == slot
6618 && get_attr_length (insn) == length)
6619 return insn;
6622 return NULL_RTX;
6625 static void
6626 mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6628 int i;
6630 for (i = 0; i < nready; ++i)
6631 if (ready[i] == insn)
6633 for (; i < nready - 1; ++i)
6634 ready[i] = ready[i + 1];
6635 ready[i] = insn;
6636 return;
6639 gcc_unreachable ();
6642 static void
6643 mep_print_sched_insn (FILE *dump, rtx insn)
6645 const char *slots = "none";
6646 const char *name = NULL;
6647 int code;
6648 char buf[30];
6650 if (GET_CODE (PATTERN (insn)) == SET
6651 || GET_CODE (PATTERN (insn)) == PARALLEL)
6653 switch (get_attr_slots (insn))
6655 case SLOTS_CORE: slots = "core"; break;
6656 case SLOTS_C3: slots = "c3"; break;
6657 case SLOTS_P0: slots = "p0"; break;
6658 case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6659 case SLOTS_P0_P1: slots = "p0,p1"; break;
6660 case SLOTS_P0S: slots = "p0s"; break;
6661 case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6662 case SLOTS_P1: slots = "p1"; break;
6663 default:
6664 sprintf(buf, "%d", get_attr_slots (insn));
6665 slots = buf;
6666 break;
6669 if (GET_CODE (PATTERN (insn)) == USE)
6670 slots = "use";
6672 code = INSN_CODE (insn);
6673 if (code >= 0)
6674 name = get_insn_name (code);
6675 if (!name)
6676 name = "{unknown}";
6678 fprintf (dump,
6679 "insn %4d %4d %8s %s\n",
6680 code,
6681 INSN_UID (insn),
6682 name,
6683 slots);
/* Implement TARGET_SCHED_REORDER.  Returns the number of insns the
   scheduler may issue this cycle: 1 by default, NREADY for IVC2 (its
   DFA does the pairing), or 2 when a core/cop pair has been moved to
   the issue positions at the end of READY.  */
6686 static int
6687 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6688 int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
6689 int *pnready, int clock ATTRIBUTE_UNUSED)
6691 int nready = *pnready;
6692 rtx core_insn, cop_insn;
6693 int i;
6695 if (dump && sched_verbose > 1)
6697 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6698 for (i=0; i<nready; i++)
6699 mep_print_sched_insn (dump, ready[i]);
6700 fprintf (dump, "\n");
/* Pairing only applies inside VLIW functions, and needs two insns.  */
6703 if (!mep_vliw_function_p (cfun->decl))
6704 return 1;
6705 if (nready < 2)
6706 return 1;
6708 /* IVC2 uses a DFA to determine what's ready and what's not. */
6709 if (TARGET_IVC2)
6710 return nready;
6712 /* We can issue either a core or coprocessor instruction.
6713 Look for a matched pair of insns to reorder. If we don't
6714 find any, don't second-guess the scheduler's priorities. */
6716 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6717 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6718 TARGET_OPT_VL64 ? 6 : 2)))
6720 else if (TARGET_OPT_VL64
6721 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6722 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6724 else
6725 /* We didn't find a pair. Issue the single insn at the head
6726 of the ready list. */
6727 return 1;
6729 /* Reorder the two insns first. */
6730 mep_move_ready_insn (ready, nready, core_insn);
6731 mep_move_ready_insn (ready, nready - 1, cop_insn);
6732 return 2;
6735 /* A for_each_rtx callback. Return true if *X is a register that is
6736 set by insn PREV. */
6738 static int
6739 mep_store_find_set (rtx *x, void *prev)
6741 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6744 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6745 not the containing insn. */
6747 static bool
6748 mep_store_data_bypass_1 (rtx prev, rtx pat)
6750 /* Cope with intrinsics like swcpa. */
6751 if (GET_CODE (pat) == PARALLEL)
6753 int i;
6755 for (i = 0; i < XVECLEN (pat, 0); i++)
6756 if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6757 return true;
6759 return false;
6762 /* Check for some sort of store. */
6763 if (GET_CODE (pat) != SET
6764 || GET_CODE (SET_DEST (pat)) != MEM)
6765 return false;
6767 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6768 The first operand to the unspec is the store data and the other operands
6769 are used to calculate the address. */
6770 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6772 rtx src;
6773 int i;
6775 src = SET_SRC (pat);
6776 for (i = 1; i < XVECLEN (src, 0); i++)
6777 if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6778 return false;
6780 return true;
6783 /* Otherwise just check that PREV doesn't modify any register mentioned
6784 in the memory destination. */
6785 return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6788 /* Return true if INSN is a store instruction and if the store address
6789 has no true dependence on PREV. */
6791 bool
6792 mep_store_data_bypass_p (rtx prev, rtx insn)
6794 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6797 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6798 is a register other than LO or HI and if PREV sets *X. */
6800 static int
6801 mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6803 return (REG_P (*x)
6804 && REGNO (*x) != LO_REGNO
6805 && REGNO (*x) != HI_REGNO
6806 && reg_set_p (*x, (const_rtx) prev));
6809 /* Return true if, apart from HI/LO, there are no true dependencies
6810 between multiplication instructions PREV and INSN. */
6812 bool
6813 mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6815 rtx pat;
6817 pat = PATTERN (insn);
6818 if (GET_CODE (pat) == PARALLEL)
6819 pat = XVECEXP (pat, 0, 0);
6820 return (GET_CODE (pat) == SET
6821 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6824 /* Return true if INSN is an ldc instruction that issues to the
6825 MeP-h1 integer pipeline. This is true for instructions that
6826 read from PSW, LP, SAR, HI and LO. */
6828 bool
6829 mep_ipipe_ldc_p (rtx insn)
6831 rtx pat, src;
6833 pat = PATTERN (insn);
6835 /* Cope with instrinsics that set both a hard register and its shadow.
6836 The set of the hard register comes first. */
6837 if (GET_CODE (pat) == PARALLEL)
6838 pat = XVECEXP (pat, 0, 0);
6840 if (GET_CODE (pat) == SET)
6842 src = SET_SRC (pat);
6844 /* Cope with intrinsics. The first operand to the unspec is
6845 the source register. */
6846 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6847 src = XVECEXP (src, 0, 0);
6849 if (REG_P (src))
6850 switch (REGNO (src))
6852 case PSW_REGNO:
6853 case LP_REGNO:
6854 case SAR_REGNO:
6855 case HI_REGNO:
6856 case LO_REGNO:
6857 return true;
6860 return false;
6863 /* Create a VLIW bundle from core instruction CORE and coprocessor
6864 instruction COP. COP always satisfies INSN_P, but CORE can be
6865 either a new pattern or an existing instruction.
6867 Emit the bundle in place of COP and return it. */
6869 static rtx
6870 mep_make_bundle (rtx core, rtx cop)
6872 rtx insn;
6874 /* If CORE is an existing instruction, remove it, otherwise put
6875 the new pattern in an INSN harness. */
6876 if (INSN_P (core))
6877 remove_insn (core);
6878 else
6879 core = make_insn_raw (core);
6881 /* Generate the bundle sequence and replace COP with it. */
6882 insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
6883 insn = emit_insn_after (insn, cop);
6884 remove_insn (cop);
6886 /* Set up the links of the insns inside the SEQUENCE. */
6887 PREV_INSN (core) = PREV_INSN (insn);
6888 NEXT_INSN (core) = cop;
6889 PREV_INSN (cop) = core;
6890 NEXT_INSN (cop) = NEXT_INSN (insn);
/* BImode on the second insn marks it as the coprocessor half of a
   bundle; mep_bundle_insns uses the same convention for non-leading
   bundle members.  */
6892 /* Set the VLIW flag for the coprocessor instruction. */
6893 PUT_MODE (core, VOIDmode);
6894 PUT_MODE (cop, BImode);
6896 /* Derive a location for the bundle. Individual instructions cannot
6897 have their own location because there can be no assembler labels
6898 between CORE and COP. */
6899 INSN_LOCATOR (insn) = INSN_LOCATOR (INSN_LOCATOR (core) ? core : cop);
6900 INSN_LOCATOR (core) = 0;
6901 INSN_LOCATOR (cop) = 0;
6903 return insn;
6906 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6908 static void
6909 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6911 rtx * pinsn = (rtx *) data;
6913 if (*pinsn && reg_mentioned_p (x, *pinsn))
6914 *pinsn = NULL_RTX;
6917 /* Return true if anything in insn X is (anti,output,true) dependent on
6918 anything in insn Y. */
6920 static int
6921 mep_insn_dependent_p (rtx x, rtx y)
6923 rtx tmp;
6925 gcc_assert (INSN_P (x));
6926 gcc_assert (INSN_P (y));
6928 tmp = PATTERN (y);
6929 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6930 if (tmp == NULL_RTX)
6931 return 1;
6933 tmp = PATTERN (x);
6934 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6935 if (tmp == NULL_RTX)
6936 return 1;
6938 return 0;
6941 static int
6942 core_insn_p (rtx insn)
6944 if (GET_CODE (PATTERN (insn)) == USE)
6945 return 0;
6946 if (get_attr_slot (insn) == SLOT_CORE)
6947 return 1;
6948 return 0;
6951 /* Mark coprocessor instructions that can be bundled together with
6952 the immediately preceding core instruction. This is later used
6953 to emit the "+" that tells the assembler to create a VLIW insn.
6955 For unbundled insns, the assembler will automatically add coprocessor
6956 nops, and 16-bit core nops. Due to an apparent oversight in the
6957 spec, the assembler will _not_ automatically add 32-bit core nops,
6958 so we have to emit those here.
6960 Called from mep_insn_reorg. */
6962 static void
6963 mep_bundle_insns (rtx insns)
6965 rtx insn, last = NULL_RTX, first = NULL_RTX;
6966 int saw_scheduling = 0;
6968 /* Only do bundling if we're in vliw mode. */
6969 if (!mep_vliw_function_p (cfun->decl))
6970 return;
6972 /* The first insn in a bundle are TImode, the remainder are
6973 VOIDmode. After this function, the first has VOIDmode and the
6974 rest have BImode. */
6976 /* Note: this doesn't appear to be true for JUMP_INSNs. */
6978 /* First, move any NOTEs that are within a bundle, to the beginning
6979 of the bundle. */
6980 for (insn = insns; insn ; insn = NEXT_INSN (insn))
6982 if (NOTE_P (insn) && first)
6983 /* Don't clear FIRST. */;
6985 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
6986 first = insn;
6988 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
6990 rtx note, prev;
6992 /* INSN is part of a bundle; FIRST is the first insn in that
6993 bundle. Move all intervening notes out of the bundle.
6994 In addition, since the debug pass may insert a label
6995 whenever the current line changes, set the location info
6996 for INSN to match FIRST. */
6998 INSN_LOCATOR (insn) = INSN_LOCATOR (first);
7000 note = PREV_INSN (insn);
7001 while (note && note != first)
7003 prev = PREV_INSN (note);
7005 if (NOTE_P (note))
7007 /* Remove NOTE from here... */
7008 PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
7009 NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
7010 /* ...and put it in here. */
7011 NEXT_INSN (note) = first;
7012 PREV_INSN (note) = PREV_INSN (first);
7013 NEXT_INSN (PREV_INSN (note)) = note;
7014 PREV_INSN (NEXT_INSN (note)) = note;
7017 note = prev;
7021 else if (!NONJUMP_INSN_P (insn))
7022 first = 0;
7025 /* Now fix up the bundles. */
7026 for (insn = insns; insn ; insn = NEXT_INSN (insn))
7028 if (NOTE_P (insn))
7029 continue;
7031 if (!NONJUMP_INSN_P (insn))
7033 last = 0;
7034 continue;
7037 /* If we're not optimizing enough, there won't be scheduling
7038 info. We detect that here. */
7039 if (GET_MODE (insn) == TImode)
7040 saw_scheduling = 1;
7041 if (!saw_scheduling)
7042 continue;
7044 if (TARGET_IVC2)
7046 rtx core_insn = NULL_RTX;
7048 /* IVC2 slots are scheduled by DFA, so we just accept
7049 whatever the scheduler gives us. However, we must make
7050 sure the core insn (if any) is the first in the bundle.
7051 The IVC2 assembler can insert whatever NOPs are needed,
7052 and allows a COP insn to be first. */
7054 if (NONJUMP_INSN_P (insn)
7055 && GET_CODE (PATTERN (insn)) != USE
7056 && GET_MODE (insn) == TImode)
/* Walk the VOIDmode members of this bundle, remembering the last
   core-slot insn seen (including the final member).  */
7058 for (last = insn;
7059 NEXT_INSN (last)
7060 && GET_MODE (NEXT_INSN (last)) == VOIDmode
7061 && NONJUMP_INSN_P (NEXT_INSN (last));
7062 last = NEXT_INSN (last))
7064 if (core_insn_p (last))
7065 core_insn = last;
7067 if (core_insn_p (last))
7068 core_insn = last;
7070 if (core_insn && core_insn != insn)
7072 /* Swap core insn to first in the bundle. */
7074 /* Remove core insn. */
7075 if (PREV_INSN (core_insn))
7076 NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
7077 if (NEXT_INSN (core_insn))
7078 PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);
7080 /* Re-insert core insn. */
7081 PREV_INSN (core_insn) = PREV_INSN (insn);
7082 NEXT_INSN (core_insn) = insn;
7084 if (PREV_INSN (core_insn))
7085 NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
7086 PREV_INSN (insn) = core_insn;
7088 PUT_MODE (core_insn, TImode);
7089 PUT_MODE (insn, VOIDmode);
7093 /* The first insn has TImode, the rest have VOIDmode */
7094 if (GET_MODE (insn) == TImode)
7095 PUT_MODE (insn, VOIDmode);
7096 else
7097 PUT_MODE (insn, BImode);
7098 continue;
/* Non-IVC2: pair a COP insn with the preceding core insn when their
   combined length matches the VLIW word, else pad with a nop bundle.  */
7101 PUT_MODE (insn, VOIDmode);
7102 if (recog_memoized (insn) >= 0
7103 && get_attr_slot (insn) == SLOT_COP)
7105 if (GET_CODE (insn) == JUMP_INSN
7106 || ! last
7107 || recog_memoized (last) < 0
7108 || get_attr_slot (last) != SLOT_CORE
7109 || (get_attr_length (insn)
7110 != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
7111 || mep_insn_dependent_p (insn, last))
7113 switch (get_attr_length (insn))
7115 case 8:
7116 break;
7117 case 6:
7118 insn = mep_make_bundle (gen_nop (), insn);
7119 break;
7120 case 4:
7121 if (TARGET_OPT_VL64)
7122 insn = mep_make_bundle (gen_nop32 (), insn);
7123 break;
7124 case 2:
7125 if (TARGET_OPT_VL64)
7126 error ("2 byte cop instructions are"
7127 " not allowed in 64-bit VLIW mode");
7128 else
7129 insn = mep_make_bundle (gen_nop (), insn);
7130 break;
7131 default:
7132 error ("unexpected %d byte cop instruction",
7133 get_attr_length (insn));
7134 break;
7137 else
7138 insn = mep_make_bundle (last, insn);
7141 last = insn;
7146 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7147 Return true on success. This function can fail if the intrinsic
7148 is unavailable or if the operands don't satisfy their predicates. */
7150 bool
7151 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7153 const struct cgen_insn *cgen_insn;
7154 const struct insn_data_d *idata;
7155 rtx newop[10];
7156 int i;
7158 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7159 return false;
7161 idata = &insn_data[cgen_insn->icode];
7162 for (i = 0; i < idata->n_operands; i++)
7164 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7165 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7166 return false;
7169 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7170 newop[3], newop[4], newop[5],
7171 newop[6], newop[7], newop[8]));
7173 return true;
7177 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7178 OPERANDS[0]. Report an error if the instruction could not
7179 be synthesized. OPERANDS[1] is a register_operand. For sign
7180 and zero extensions, it may be smaller than SImode. */
7182 bool
7183 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
7184 rtx * operands ATTRIBUTE_UNUSED)
7186 return false;
/* Likewise, but apply a binary operation to OPERANDS[1] and
   OPERANDS[2].  OPERANDS[1] is a register_operand, OPERANDS[2]
   can be a general_operand.

   IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
   third operand.  REG and REG3 take register operands only.  */

bool
mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
			     int ATTRIBUTE_UNUSED immediate3,
			     int ATTRIBUTE_UNUSED reg,
			     int ATTRIBUTE_UNUSED reg3,
			     rtx * operands ATTRIBUTE_UNUSED)
{
  /* Deliberate stub: binary intrinsic expansion is not implemented for
     this port, so tell the caller to fall back on ordinary RTL.  */
  return false;
}
7207 static bool
7208 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total, bool ATTRIBUTE_UNUSED speed_t)
7210 switch (code)
7212 case CONST_INT:
7213 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7214 *total = 0;
7215 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7216 *total = 1;
7217 else
7218 *total = 3;
7219 return true;
7221 case SYMBOL_REF:
7222 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7223 return true;
7225 case MULT:
7226 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7227 ? COSTS_N_INSNS (3)
7228 : COSTS_N_INSNS (2));
7229 return true;
7231 return false;
/* Implement TARGET_ADDRESS_COST.  All addressing modes are considered
   equally cheap on MeP.  */

static int
mep_address_cost (rtx addr ATTRIBUTE_UNUSED, bool ATTRIBUTE_UNUSED speed_p)
{
  return 1;
}
7240 static bool
7241 mep_handle_option (size_t code,
7242 const char *arg ATTRIBUTE_UNUSED,
7243 int value ATTRIBUTE_UNUSED)
7245 int i;
7247 switch (code)
7249 case OPT_mall_opts:
7250 target_flags |= MEP_ALL_OPTS;
7251 break;
7253 case OPT_mno_opts:
7254 target_flags &= ~ MEP_ALL_OPTS;
7255 break;
7257 case OPT_mcop64:
7258 target_flags |= MASK_COP;
7259 target_flags |= MASK_64BIT_CR_REGS;
7260 break;
7262 case OPT_mtiny_:
7263 option_mtiny_specified = 1;
7265 case OPT_mivc2:
7266 target_flags |= MASK_COP;
7267 target_flags |= MASK_64BIT_CR_REGS;
7268 target_flags |= MASK_VLIW;
7269 target_flags |= MASK_OPT_VL64;
7270 target_flags |= MASK_IVC2;
7272 for (i=0; i<32; i++)
7273 fixed_regs[i+48] = 0;
7274 for (i=0; i<32; i++)
7275 call_used_regs[i+48] = 1;
7276 for (i=6; i<8; i++)
7277 call_used_regs[i+48] = 0;
7279 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
7280 RN (0, "$csar0");
7281 RN (1, "$cc");
7282 RN (4, "$cofr0");
7283 RN (5, "$cofr1");
7284 RN (6, "$cofa0");
7285 RN (7, "$cofa1");
7286 RN (15, "$csar1");
7288 RN (16, "$acc0_0");
7289 RN (17, "$acc0_1");
7290 RN (18, "$acc0_2");
7291 RN (19, "$acc0_3");
7292 RN (20, "$acc0_4");
7293 RN (21, "$acc0_5");
7294 RN (22, "$acc0_6");
7295 RN (23, "$acc0_7");
7297 RN (24, "$acc1_0");
7298 RN (25, "$acc1_1");
7299 RN (26, "$acc1_2");
7300 RN (27, "$acc1_3");
7301 RN (28, "$acc1_4");
7302 RN (29, "$acc1_5");
7303 RN (30, "$acc1_6");
7304 RN (31, "$acc1_7");
7305 #undef RN
7307 break;
7309 default:
7310 break;
7312 return TRUE;
7315 static void
7316 mep_asm_init_sections (void)
7318 based_section
7319 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7320 "\t.section .based,\"aw\"");
7322 tinybss_section
7323 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7324 "\t.section .sbss,\"aw\"");
7326 sdata_section
7327 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7328 "\t.section .sdata,\"aw\",@progbits");
7330 far_section
7331 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7332 "\t.section .far,\"aw\"");
7334 farbss_section
7335 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7336 "\t.section .farbss,\"aw\"");
7338 frodata_section
7339 = get_unnamed_section (0, output_section_asm_op,
7340 "\t.section .frodata,\"a\"");
7342 srodata_section
7343 = get_unnamed_section (0, output_section_asm_op,
7344 "\t.section .srodata,\"a\"");
7346 vtext_section
7347 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7348 "\t.section .vtext,\"axv\"\n\t.vliw");
7350 vftext_section
7351 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7352 "\t.section .vftext,\"axv\"\n\t.vliw");
7354 ftext_section
7355 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7356 "\t.section .ftext,\"ax\"\n\t.core");
/* Initialize the GCC target structure.  */

/* Attributes, inlining, and assembly output.  */
#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE mep_attribute_table
#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
#undef TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P mep_can_inline_p

/* Section selection and naming.  */
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION mep_asm_named_section

/* Builtins (intrinsics).  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS mep_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN mep_expand_builtin

/* Instruction scheduling.  */
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST mep_adjust_cost
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE mep_issue_rate
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER mep_sched_reorder

/* Symbol encoding and section placement.  */
#undef TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION mep_select_section
#undef TARGET_ASM_UNIQUE_SECTION
#define TARGET_ASM_UNIQUE_SECTION mep_unique_section
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO mep_encode_section_info

/* Calls, costs, and machine-dependent reorganization.  */
#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS mep_rtx_cost
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST mep_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG mep_reorg

/* Argument passing and varargs.  */
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p

/* Option handling.  */
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION mep_handle_option
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE mep_option_override
#undef TARGET_OPTION_OPTIMIZATION
#define TARGET_OPTION_OPTIMIZATION mep_option_optimization
#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT

/* Miscellaneous hooks.  */
#undef TARGET_ALLOCATE_INITIAL_VALUE
#define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY mep_return_in_memory
#undef TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
#undef TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE mep_can_eliminate
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT mep_trampoline_init

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-mep.h"