mep.md (eh_epilogue): Defer until after epilogue is emitted.
[official-gcc.git] / gcc / config / mep / mep.c
blob6ff6405c571b55c28659fa3ec48b1e13e95fd9b2
1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "recog.h"
38 #include "obstack.h"
39 #include "tree.h"
40 #include "expr.h"
41 #include "except.h"
42 #include "function.h"
43 #include "optabs.h"
44 #include "reload.h"
45 #include "tm_p.h"
46 #include "ggc.h"
47 #include "toplev.h"
48 #include "integrate.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
52 #include "df.h"
54 /* Structure of this file:
56 + Command Line Option Support
57 + Pattern support - constraints, predicates, expanders
58 + Reload Support
59 + Costs
60 + Functions to save and restore machine-specific function data.
61 + Frame/Epilog/Prolog Related
62 + Operand Printing
63 + Function args in registers
64 + Handle pipeline hazards
65 + Handle attributes
66 + Trampolines
67 + Machine-dependent Reorg
68 + Builtins. */
70 /* Symbol encodings:
72 Symbols are encoded as @ <char> . <name> where <char> is one of these:
74 b - based
75 t - tiny
76 n - near
77 f - far
78 i - io, near
79 I - io, far
80 c - cb (control bus) */
/* Per-function machine-specific state, kept in cfun->machine and
   allocated by mep_init_machine_status.  */
82 struct GTY(()) machine_function
/* Cached frame-pointer-needed decision for this function.  */
84 int mep_frame_pointer_needed;
86 /* For varargs. */
87 int arg_regs_to_save;
/* Padding inserted to keep the register-save area and local frame
   aligned.  */
88 int regsave_filler;
89 int frame_filler;
/* Nonzero once the frame layout may no longer change.  */
90 int frame_locked;
92 /* Records __builtin_return address. */
/* NOTE(review): despite the comment above, the name suggests an EH
   stack adjustment — verify against the field's uses.  */
93 rtx eh_stack_adjust;
/* Size of the register-save area, the slot offset assigned to each
   hard register, and whether each register is saved.  */
95 int reg_save_size;
96 int reg_save_slot[FIRST_PSEUDO_REGISTER];
97 unsigned char reg_saved[FIRST_PSEUDO_REGISTER];
99 /* 2 if the current function has an interrupt attribute, 1 if not, 0
100 if unknown. This is here because resource.c uses EPILOGUE_USES
101 which needs it. */
102 int interrupt_handler;
104 /* Likewise, for disinterrupt attribute. */
105 int disable_interrupts;
107 /* Number of doloop tags used so far. */
108 int doloop_tags;
110 /* True if the last tag was allocated to a doloop_end. */
111 bool doloop_tag_from_end;
113 /* True if reload changes $TP. */
114 bool reload_changes_tp;
116 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
117 We only set this if the function is an interrupt handler. */
118 int asms_without_operands;
121 #define MEP_CONTROL_REG(x) \
122 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
124 static const struct attribute_spec mep_attribute_table[11];
126 static GTY(()) section * based_section;
127 static GTY(()) section * tinybss_section;
128 static GTY(()) section * far_section;
129 static GTY(()) section * farbss_section;
130 static GTY(()) section * frodata_section;
131 static GTY(()) section * srodata_section;
133 static GTY(()) section * vtext_section;
134 static GTY(()) section * vftext_section;
135 static GTY(()) section * ftext_section;
137 static void mep_set_leaf_registers (int);
138 static bool symbol_p (rtx);
139 static bool symbolref_p (rtx);
140 static void encode_pattern_1 (rtx);
141 static void encode_pattern (rtx);
142 static bool const_in_range (rtx, int, int);
143 static void mep_rewrite_mult (rtx, rtx);
144 static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
145 static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
146 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
147 static bool move_needs_splitting (rtx, rtx, enum machine_mode);
148 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
149 static bool mep_nongeneral_reg (rtx);
150 static bool mep_general_copro_reg (rtx);
151 static bool mep_nonregister (rtx);
152 static struct machine_function* mep_init_machine_status (void);
153 static rtx mep_tp_rtx (void);
154 static rtx mep_gp_rtx (void);
155 static bool mep_interrupt_p (void);
156 static bool mep_disinterrupt_p (void);
157 static bool mep_reg_set_p (rtx, rtx);
158 static bool mep_reg_set_in_function (int);
159 static bool mep_interrupt_saved_reg (int);
160 static bool mep_call_saves_register (int);
161 static rtx F (rtx);
162 static void add_constant (int, int, int, int);
163 static bool mep_function_uses_sp (void);
164 static rtx maybe_dead_move (rtx, rtx, bool);
165 static void mep_reload_pointer (int, const char *);
166 static void mep_start_function (FILE *, HOST_WIDE_INT);
167 static bool mep_function_ok_for_sibcall (tree, tree);
168 static int unique_bit_in (HOST_WIDE_INT);
169 static int bit_size_for_clip (HOST_WIDE_INT);
170 static int bytesize (const_tree, enum machine_mode);
171 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
172 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
173 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
174 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
175 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
176 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
177 static bool mep_function_attribute_inlinable_p (const_tree);
178 static bool mep_can_inline_p (tree, tree);
179 static bool mep_lookup_pragma_disinterrupt (const char *);
180 static int mep_multiple_address_regions (tree, bool);
181 static int mep_attrlist_to_encoding (tree, tree);
182 static void mep_insert_attributes (tree, tree *);
183 static void mep_encode_section_info (tree, rtx, int);
184 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
185 static void mep_unique_section (tree, int);
186 static unsigned int mep_section_type_flags (tree, const char *, int);
187 static void mep_asm_named_section (const char *, unsigned int, tree);
188 static bool mep_mentioned_p (rtx, rtx, int);
189 static void mep_reorg_regmove (rtx);
190 static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
191 static void mep_reorg_repeat (rtx);
192 static bool mep_invertable_branch_p (rtx);
193 static void mep_invert_branch (rtx, rtx);
194 static void mep_reorg_erepeat (rtx);
195 static void mep_jmp_return_reorg (rtx);
196 static void mep_reorg_addcombine (rtx);
197 static void mep_reorg (void);
198 static void mep_init_intrinsics (void);
199 static void mep_init_builtins (void);
200 static void mep_intrinsic_unavailable (int);
201 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
202 static bool mep_get_move_insn (int, const struct cgen_insn **);
203 static rtx mep_convert_arg (enum machine_mode, rtx);
204 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
205 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
206 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
207 static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
208 static int mep_adjust_cost (rtx, rtx, rtx, int);
209 static int mep_issue_rate (void);
210 static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
211 static void mep_move_ready_insn (rtx *, int, rtx);
212 static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
213 static rtx mep_make_bundle (rtx, rtx);
214 static void mep_bundle_insns (rtx);
215 static bool mep_rtx_cost (rtx, int, int, int *, bool);
216 static int mep_address_cost (rtx, bool);
217 static void mep_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
218 tree, int *, int);
219 static bool mep_pass_by_reference (CUMULATIVE_ARGS * cum, enum machine_mode,
220 const_tree, bool);
221 static bool mep_vector_mode_supported_p (enum machine_mode);
222 static bool mep_handle_option (size_t, const char *, int);
223 static rtx mep_allocate_initial_value (rtx);
224 static void mep_asm_init_sections (void);
225 static int mep_comp_type_attributes (const_tree, const_tree);
226 static bool mep_narrow_volatile_bitfield (void);
227 static rtx mep_expand_builtin_saveregs (void);
228 static tree mep_build_builtin_va_list (void);
229 static void mep_expand_va_start (tree, rtx);
230 static tree mep_gimplify_va_arg_expr (tree, tree, tree *, tree *);
231 static bool mep_can_eliminate (const int, const int);
233 /* Initialize the GCC target structure. */
235 #undef TARGET_ASM_FUNCTION_PROLOGUE
236 #define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
237 #undef TARGET_ATTRIBUTE_TABLE
238 #define TARGET_ATTRIBUTE_TABLE mep_attribute_table
239 #undef TARGET_COMP_TYPE_ATTRIBUTES
240 #define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
241 #undef TARGET_INSERT_ATTRIBUTES
242 #define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
243 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
244 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
245 #undef TARGET_CAN_INLINE_P
246 #define TARGET_CAN_INLINE_P mep_can_inline_p
247 #undef TARGET_SECTION_TYPE_FLAGS
248 #define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
249 #undef TARGET_ASM_NAMED_SECTION
250 #define TARGET_ASM_NAMED_SECTION mep_asm_named_section
251 #undef TARGET_INIT_BUILTINS
252 #define TARGET_INIT_BUILTINS mep_init_builtins
253 #undef TARGET_EXPAND_BUILTIN
254 #define TARGET_EXPAND_BUILTIN mep_expand_builtin
255 #undef TARGET_SCHED_ADJUST_COST
256 #define TARGET_SCHED_ADJUST_COST mep_adjust_cost
257 #undef TARGET_SCHED_ISSUE_RATE
258 #define TARGET_SCHED_ISSUE_RATE mep_issue_rate
259 #undef TARGET_SCHED_REORDER
260 #define TARGET_SCHED_REORDER mep_sched_reorder
261 #undef TARGET_STRIP_NAME_ENCODING
262 #define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
263 #undef TARGET_ASM_SELECT_SECTION
264 #define TARGET_ASM_SELECT_SECTION mep_select_section
265 #undef TARGET_ASM_UNIQUE_SECTION
266 #define TARGET_ASM_UNIQUE_SECTION mep_unique_section
267 #undef TARGET_ENCODE_SECTION_INFO
268 #define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
269 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
270 #define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
271 #undef TARGET_RTX_COSTS
272 #define TARGET_RTX_COSTS mep_rtx_cost
273 #undef TARGET_ADDRESS_COST
274 #define TARGET_ADDRESS_COST mep_address_cost
275 #undef TARGET_MACHINE_DEPENDENT_REORG
276 #define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
277 #undef TARGET_SETUP_INCOMING_VARARGS
278 #define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
279 #undef TARGET_PASS_BY_REFERENCE
280 #define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
281 #undef TARGET_VECTOR_MODE_SUPPORTED_P
282 #define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
283 #undef TARGET_HANDLE_OPTION
284 #define TARGET_HANDLE_OPTION mep_handle_option
285 #undef TARGET_DEFAULT_TARGET_FLAGS
286 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
287 #undef TARGET_ALLOCATE_INITIAL_VALUE
288 #define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
289 #undef TARGET_ASM_INIT_SECTIONS
290 #define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
291 #undef TARGET_RETURN_IN_MEMORY
292 #define TARGET_RETURN_IN_MEMORY mep_return_in_memory
293 #undef TARGET_NARROW_VOLATILE_BITFIELD
294 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
295 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
296 #define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
297 #undef TARGET_BUILD_BUILTIN_VA_LIST
298 #define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
299 #undef TARGET_EXPAND_BUILTIN_VA_START
300 #define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
301 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
302 #define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
303 #undef TARGET_CAN_ELIMINATE
304 #define TARGET_CAN_ELIMINATE mep_can_eliminate
306 struct gcc_target targetm = TARGET_INITIALIZER;
308 #define WANT_GCC_DEFINITIONS
309 #include "mep-intrin.h"
310 #undef WANT_GCC_DEFINITIONS
313 /* Command Line Option Support. */
/* Per-register leaf flags, maintained by mep_set_leaf_registers.  */
315 char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
317 /* True if we can use cmov instructions to move values back and forth
318 between core and coprocessor registers. */
319 bool mep_have_core_copro_moves_p;
321 /* True if we can use cmov instructions (or a work-alike) to move
322 values between coprocessor registers. */
323 bool mep_have_copro_copro_moves_p;
325 /* A table of all coprocessor instructions that can act like
326 a coprocessor-to-coprocessor cmov. */
327 static const int mep_cmov_insns[] = {
328 mep_cmov,
329 mep_cpmov,
330 mep_fmovs,
331 mep_caddi3,
332 mep_csubi3,
333 mep_candi3,
334 mep_cori3,
335 mep_cxori3,
336 mep_cand3,
337 mep_cor3
/* Nonzero if -mtiny= appeared explicitly on the command line; used by
   mep_override_options to diagnose conflicts with -ms/-mm.  */
340 static int option_mtiny_specified = 0;
343 static void
344 mep_set_leaf_registers (int enable)
346 int i;
348 if (mep_leaf_registers[0] != enable)
349 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
350 mep_leaf_registers[i] = enable;
353 void
354 mep_conditional_register_usage (char *fixed_regs, char *call_used_regs)
356 int i;
358 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
360 fixed_regs[HI_REGNO] = 1;
361 fixed_regs[LO_REGNO] = 1;
362 call_used_regs[HI_REGNO] = 1;
363 call_used_regs[LO_REGNO] = 1;
366 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
367 global_regs[i] = 1;
/* Implement OPTIMIZATION_OPTIONS: per-port adjustments to the default
   optimization flags.  */
370 void
371 mep_optimization_options (void)
373 /* The first scheduling pass often increases register pressure and tends
374 to result in more spill code. Only run it when specifically asked. */
375 flag_schedule_insns = 0;
377 /* Using $fp doesn't gain us much, even when debugging is important. */
378 flag_omit_frame_pointer = 1;
/* Implement OVERRIDE_OPTIONS: diagnose unsupported or conflicting
   option combinations and derive dependent settings once the command
   line has been parsed.  */
382 void
383 mep_override_options (void)
/* PIC is not supported on this target.  */
384 if (flag_pic == 1)
385 warning (OPT_fpic, "-fpic is not supported");
386 if (flag_pic == 2)
387 warning (OPT_fPIC, "-fPIC is not supported");
/* -ms, -mm and -ml select mutually exclusive memory models, and
   -ms/-mm imply their own tiny cutoff, conflicting with -mtiny=.  */
388 if (TARGET_S && TARGET_M)
389 error ("only one of -ms and -mm may be given");
390 if (TARGET_S && TARGET_L)
391 error ("only one of -ms and -ml may be given");
392 if (TARGET_M && TARGET_L)
393 error ("only one of -mm and -ml may be given");
394 if (TARGET_S && option_mtiny_specified)
395 error ("only one of -ms and -mtiny= may be given");
396 if (TARGET_M && option_mtiny_specified)
397 error ("only one of -mm and -mtiny= may be given");
398 if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
399 warning (0, "-mclip currently has no effect without -mminmax");
/* -mc= only accepts the three named constant sections.  */
401 if (mep_const_section)
403 if (strcmp (mep_const_section, "tiny") != 0
404 && strcmp (mep_const_section, "near") != 0
405 && strcmp (mep_const_section, "far") != 0)
406 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
/* The memory model determines the default tiny-section cutoff.  */
409 if (TARGET_S)
410 mep_tiny_cutoff = 65536;
411 if (TARGET_M)
412 mep_tiny_cutoff = 0;
413 if (TARGET_L && ! option_mtiny_specified)
414 mep_tiny_cutoff = 0;
/* 64-bit coprocessor registers must not be split into word pairs.  */
416 if (TARGET_64BIT_CR_REGS)
417 flag_split_wide_types = 0;
419 init_machine_status = mep_init_machine_status;
420 mep_init_intrinsics ();
423 /* Pattern Support - constraints, predicates, expanders. */
425 /* MEP has very few instructions that can refer to the span of
426 addresses used by symbols, so it's common to check for them. */
428 static bool
429 symbol_p (rtx x)
431 int c = GET_CODE (x);
433 return (c == CONST_INT
434 || c == CONST
435 || c == SYMBOL_REF);
438 static bool
439 symbolref_p (rtx x)
441 int c;
443 if (GET_CODE (x) != MEM)
444 return false;
446 c = GET_CODE (XEXP (x, 0));
447 return (c == CONST_INT
448 || c == CONST
449 || c == SYMBOL_REF);
452 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
/* True if R is a general-purpose register number; in the non-strict
   case the arg pointer and pseudos are accepted as well.  */
454 #define GEN_REG(R, STRICT) \
455 (GR_REGNO_P (R) \
456 || (!STRICT \
457 && ((R) == ARG_POINTER_REGNUM \
458 || (R) >= FIRST_PSEUDO_REGISTER)))
/* Buffers filled by encode_pattern: one shape character per rtx node
   in `pattern', with the corresponding sub-rtxes kept in patternr.  */
460 static char pattern[12], *patternp;
461 static GTY(()) rtx patternr[12];
/* Test the encoded shape of the last rtx passed to encode_pattern.  */
462 #define RTX_IS(x) (strcmp (pattern, x) == 0)
/* Append a one-character shape code for rtx X (and, recursively, its
   operands) to the global `pattern' buffer, recording each node in
   patternr.  Used via encode_pattern/RTX_IS to match rtx shapes.  */
464 static void
465 encode_pattern_1 (rtx x)
467 int i;
/* Leave room for the terminating NUL; on overflow mark the pattern
   with '?' so no RTX_IS string can match it.  */
469 if (patternp == pattern + sizeof (pattern) - 2)
471 patternp[-1] = '?';
472 return;
475 patternr[patternp-pattern] = x;
477 switch (GET_CODE (x))
479 case REG:
480 *patternp++ = 'r';
481 break;
482 case MEM:
483 *patternp++ = 'm';
/* FALLTHRU - a MEM's address is encoded just like a CONST's operand.  */
484 case CONST:
485 encode_pattern_1 (XEXP(x, 0));
486 break;
487 case PLUS:
488 *patternp++ = '+';
489 encode_pattern_1 (XEXP(x, 0));
490 encode_pattern_1 (XEXP(x, 1));
491 break;
492 case LO_SUM:
493 *patternp++ = 'L';
494 encode_pattern_1 (XEXP(x, 0));
495 encode_pattern_1 (XEXP(x, 1));
496 break;
497 case HIGH:
498 *patternp++ = 'H';
499 encode_pattern_1 (XEXP(x, 0));
500 break;
501 case SYMBOL_REF:
502 *patternp++ = 's';
503 break;
504 case LABEL_REF:
505 *patternp++ = 'l';
506 break;
507 case CONST_INT:
508 case CONST_DOUBLE:
509 *patternp++ = 'i';
510 break;
511 case UNSPEC:
/* Encode the unspec number as a digit, then its operands.  */
512 *patternp++ = 'u';
513 *patternp++ = '0' + XCINT(x, 1, UNSPEC);
514 for (i=0; i<XVECLEN (x, 0); i++)
515 encode_pattern_1 (XVECEXP (x, 0, i));
516 break;
517 case USE:
518 *patternp++ = 'U';
519 break;
520 default:
/* Unknown codes are encoded as '?' rather than aborting.  */
521 *patternp++ = '?';
522 #if 0
523 fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
524 debug_rtx (x);
525 gcc_unreachable ();
526 #endif
527 break;
531 static void
532 encode_pattern (rtx x)
534 patternp = pattern;
535 encode_pattern_1 (x);
536 *patternp = 0;
/* Return the section tag character encoded in the name of the symbol
   ultimately referenced by X ('b' based, 't' tiny, 'n' near, 'f' far,
   'c' control bus), or 0 if X contains no encoded symbol.  MEM, CONST
   and UNSPEC wrappers and constant offsets are stripped on the way.  */
540 mep_section_tag (rtx x)
542 const char *name;
544 while (1)
546 switch (GET_CODE (x))
548 case MEM:
549 case CONST:
550 x = XEXP (x, 0);
551 break;
552 case UNSPEC:
553 x = XVECEXP (x, 0, 0);
554 break;
555 case PLUS:
/* Only constant offsets are transparent; anything else has no tag.  */
556 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
557 return 0;
558 x = XEXP (x, 0);
559 break;
560 default:
561 goto done;
564 done:
565 if (GET_CODE (x) != SYMBOL_REF)
566 return 0;
567 name = XSTR (x, 0);
568 if (name[0] == '@' && name[2] == '.')
/* The io encodings map onto the near/far tags; see the encoding
   table at the top of this file ('i' = io near, 'I' = io far).  */
570 if (name[1] == 'i' || name[1] == 'I')
572 if (name[1] == 'I')
573 return 'f'; /* io, far -> far tag.  */
574 return 'n'; /* io, near -> near tag.  */
576 return name[1];
578 return 0;
/* Implement REGNO_REG_CLASS: return the preferred (smallest) register
   class containing hard register REGNO.  */
582 mep_regno_reg_class (int regno)
584 switch (regno)
586 case SP_REGNO: return SP_REGS;
587 case TP_REGNO: return TP_REGS;
588 case GP_REGNO: return GP_REGS;
589 case 0: return R0_REGS;
590 case HI_REGNO: return HI_REGS;
591 case LO_REGNO: return LO_REGS;
592 case ARG_POINTER_REGNUM: return GENERAL_REGS;
/* The first eight general registers are reachable with $tp-relative
   addressing, so they get their own class.  */
595 if (GR_REGNO_P (regno))
596 return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
597 if (CONTROL_REGNO_P (regno))
598 return CONTROL_REGS;
600 if (CR_REGNO_P (regno))
602 int i, j;
604 /* Search for the register amongst user-defined subclasses of
605 the coprocessor registers. */
606 for (i = USER0_REGS; i <= USER3_REGS; ++i)
608 if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
609 continue;
/* Only return USERn_REGS when no smaller subclass also contains
   REGNO.  */
610 for (j = 0; j < N_REG_CLASSES; ++j)
612 enum reg_class sub = reg_class_subclasses[i][j];
614 if (sub == LIM_REG_CLASSES)
615 return i;
616 if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
617 break;
621 return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
624 if (CCR_REGNO_P (regno))
625 return CCR_REGS;
/* Only the shadow registers should be left; they belong to no class.  */
627 gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
628 return NO_REGS;
631 #if 0
633 mep_reg_class_from_constraint (int c, const char *str)
635 switch (c)
637 case 'a':
638 return SP_REGS;
639 case 'b':
640 return TP_REGS;
641 case 'c':
642 return CONTROL_REGS;
643 case 'd':
644 return HILO_REGS;
645 case 'e':
647 switch (str[1])
649 case 'm':
650 return LOADABLE_CR_REGS;
651 case 'x':
652 return mep_have_copro_copro_moves_p ? CR_REGS : NO_REGS;
653 case 'r':
654 return mep_have_core_copro_moves_p ? CR_REGS : NO_REGS;
655 default:
656 return NO_REGS;
659 case 'h':
660 return HI_REGS;
661 case 'j':
662 return RPC_REGS;
663 case 'l':
664 return LO_REGS;
665 case 't':
666 return TPREL_REGS;
667 case 'v':
668 return GP_REGS;
669 case 'x':
670 return CR_REGS;
671 case 'y':
672 return CCR_REGS;
673 case 'z':
674 return R0_REGS;
676 case 'A':
677 case 'B':
678 case 'C':
679 case 'D':
681 enum reg_class which = c - 'A' + USER0_REGS;
682 return (reg_class_size[which] > 0 ? which : NO_REGS);
685 default:
686 return NO_REGS;
690 bool
691 mep_const_ok_for_letter_p (HOST_WIDE_INT value, int c)
693 switch (c)
695 case 'I': return value >= -32768 && value < 32768;
696 case 'J': return value >= 0 && value < 65536;
697 case 'K': return value >= 0 && value < 0x01000000;
698 case 'L': return value >= -32 && value < 32;
699 case 'M': return value >= 0 && value < 32;
700 case 'N': return value >= 0 && value < 16;
701 case 'O':
702 if (value & 0xffff)
703 return false;
704 return value >= -2147483647-1 && value <= 2147483647;
705 default:
706 gcc_unreachable ();
710 bool
711 mep_extra_constraint (rtx value, int c)
713 encode_pattern (value);
715 switch (c)
717 case 'R':
718 /* For near symbols, like what call uses. */
719 if (GET_CODE (value) == REG)
720 return 0;
721 return mep_call_address_operand (value, GET_MODE (value));
723 case 'S':
724 /* For signed 8-bit immediates. */
725 return (GET_CODE (value) == CONST_INT
726 && INTVAL (value) >= -128
727 && INTVAL (value) <= 127);
729 case 'T':
730 /* For tp/gp relative symbol values. */
731 return (RTX_IS ("u3s") || RTX_IS ("u2s")
732 || RTX_IS ("+u3si") || RTX_IS ("+u2si"));
734 case 'U':
735 /* Non-absolute memories. */
736 return GET_CODE (value) == MEM && ! CONSTANT_P (XEXP (value, 0));
738 case 'W':
739 /* %hi(sym) */
740 return RTX_IS ("Hs");
742 case 'Y':
743 /* Register indirect. */
744 return RTX_IS ("mr");
746 case 'Z':
747 return mep_section_tag (value) == 'c' && RTX_IS ("ms");
750 return false;
752 #endif
754 #undef PASS
755 #undef FAIL
757 static bool
758 const_in_range (rtx x, int minv, int maxv)
760 return (GET_CODE (x) == CONST_INT
761 && INTVAL (x) >= minv
762 && INTVAL (x) <= maxv);
765 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
766 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
767 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
768 at the end of the insn stream. */
771 mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
/* If DEST already holds one operand, mulr can use the other directly.  */
773 if (rtx_equal_p (dest, src1))
774 return src2;
775 else if (rtx_equal_p (dest, src2))
776 return src1;
777 else
/* DEST matches neither source: copy SRC1 into DEST first (before
   INSN when given, else at the end of the stream), then multiply
   by SRC2.  */
779 if (insn == 0)
780 emit_insn (gen_movsi (copy_rtx (dest), src1));
781 else
782 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
783 return src2;
787 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
788 Change the last element of PATTERN from (clobber (scratch:SI))
789 to (clobber (reg:SI HI_REGNO)). */
791 static void
792 mep_rewrite_mult (rtx insn, rtx pattern)
794 rtx hi_clobber;
/* The scratch clobber is the last element of the PARALLEL; retarget
   it at the hard register $hi.  */
796 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
797 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
798 PATTERN (insn) = pattern;
/* Force re-recognition of the rewritten insn.  */
799 INSN_CODE (insn) = -1;
802 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
803 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
804 store the result in DEST if nonnull. */
806 static void
807 mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
809 rtx lo, pattern;
/* The product always lands in $lo; DEST additionally receives it in
   the mulr form.  */
811 lo = gen_rtx_REG (SImode, LO_REGNO);
812 if (dest)
813 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
814 mep_mulr_source (insn, dest, src1, src2));
815 else
816 pattern = gen_mulsi3_lo (lo, src1, src2);
817 mep_rewrite_mult (insn, pattern);
820 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
821 SRC3 into $lo, then use either madd or maddr. The move into $lo will
822 be deleted by a peephole2 if SRC3 is already in $lo. */
824 static void
825 mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
827 rtx lo, pattern;
829 lo = gen_rtx_REG (SImode, LO_REGNO);
/* Seed $lo with the addend; a peephole2 deletes this move again when
   SRC3 is already in $lo.  */
830 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
831 if (dest)
832 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
833 mep_mulr_source (insn, dest, src1, src2),
834 copy_rtx (lo));
835 else
836 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
837 mep_rewrite_mult (insn, pattern);
840 /* Return true if $lo has the same value as integer register GPR when
841 instruction INSN is reached. If necessary, rewrite the instruction
842 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
843 rtx for (reg:SI LO_REGNO).
845 This function is intended to be used by the peephole2 pass. Since
846 that pass goes from the end of a basic block to the beginning, and
847 propagates liveness information on the way, there is no need to
848 update register notes here.
850 If GPR_DEAD_P is true on entry, and this function returns true,
851 then the caller will replace _every_ use of GPR in and after INSN
852 with LO. This means that if the instruction that sets $lo is a
853 mulr- or maddr-type instruction, we can rewrite it to use mul or
854 madd instead. In combination with the copy progagation pass,
855 this allows us to replace sequences like:
857 mov GPR,R1
858 mulr GPR,R2
860 with:
862 mul R1,R2
864 if GPR is no longer used. */
866 static bool
867 mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
/* Scan backwards through the basic block for the insn that last set
   $lo or GPR.  */
871 insn = PREV_INSN (insn);
872 if (INSN_P (insn))
873 switch (recog_memoized (insn))
875 case CODE_FOR_mulsi3_1:
876 extract_insn (insn);
877 if (rtx_equal_p (recog_data.operand[0], gpr))
/* GPR was set by this multiply: rewrite it to put the result in
   $lo, dropping the GPR copy when GPR is dead.  */
879 mep_rewrite_mulsi3 (insn,
880 gpr_dead_p ? NULL : recog_data.operand[0],
881 recog_data.operand[1],
882 recog_data.operand[2]);
883 return true;
885 return false;
887 case CODE_FOR_maddsi3:
888 extract_insn (insn);
889 if (rtx_equal_p (recog_data.operand[0], gpr))
891 mep_rewrite_maddsi3 (insn,
892 gpr_dead_p ? NULL : recog_data.operand[0],
893 recog_data.operand[1],
894 recog_data.operand[2],
895 recog_data.operand[3]);
896 return true;
898 return false;
900 case CODE_FOR_mulsi3r:
901 case CODE_FOR_maddsi3r:
/* Already a mulr/maddr-style insn: $lo matches GPR exactly when
   its register operand is GPR.  */
902 extract_insn (insn);
903 return rtx_equal_p (recog_data.operand[1], gpr);
905 default:
/* Any other setter of $lo or GPR, or a volatile insn, blocks the
   reuse.  */
906 if (reg_set_p (lo, insn)
907 || reg_set_p (gpr, insn)
908 || volatile_insn_p (PATTERN (insn)))
909 return false;
/* A use of GPR before the defining insn means GPR is not dead
   for the rewrite above.  */
911 if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
912 gpr_dead_p = false;
913 break;
/* Stop at the head of the basic block.  */
916 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
917 return false;
920 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
922 bool
923 mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
925 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
926 extract_insn (insn);
927 return result;
930 /* Return true if SET can be turned into a post-modify load or store
931 that adds OFFSET to GPR. In other words, return true if SET can be
932 changed into:
934 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
936 It's OK to change SET to an equivalent operation in order to
937 make it match. */
939 static bool
940 mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
942 rtx *reg, *mem;
943 unsigned int reg_bytes, mem_bytes;
944 enum machine_mode reg_mode, mem_mode;
946 /* Only simple SETs can be converted. */
947 if (GET_CODE (set) != SET)
948 return false;
950 /* Point REG to what we hope will be the register side of the set and
951 MEM to what we hope will be the memory side. */
952 if (GET_CODE (SET_DEST (set)) == MEM)
954 mem = &SET_DEST (set);
955 reg = &SET_SRC (set);
957 else
959 reg = &SET_DEST (set);
960 mem = &SET_SRC (set);
/* Look through a sign-extending load.  */
961 if (GET_CODE (*mem) == SIGN_EXTEND)
962 mem = &XEXP (*mem, 0);
965 /* Check that *REG is a suitable coprocessor register. */
966 if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
967 return false;
/* The address must be exactly GPR (no offset) for a post-modify.  */
969 /* Check that *MEM is a suitable memory reference. */
970 if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
971 return false;
973 /* Get the number of bytes in each operand. */
974 mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
975 reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));
977 /* Check that OFFSET is suitably aligned. */
978 if (INTVAL (offset) & (mem_bytes - 1))
979 return false;
/* Past this point the SET is modified in place; all checks must have
   succeeded already.  */
981 /* Convert *MEM to a normal integer mode. */
982 mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
983 *mem = change_address (*mem, mem_mode, NULL);
985 /* Adjust *REG as well. */
986 *reg = shallow_copy_rtx (*reg);
987 if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
989 /* SET is a subword load. Convert it to an explicit extension. */
990 PUT_MODE (*reg, SImode);
991 *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
993 else
995 reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
996 PUT_MODE (*reg, reg_mode);
998 return true;
1001 /* Return the effect of frame-related instruction INSN. */
1003 static rtx
1004 mep_frame_expr (rtx insn)
1006 rtx note, expr;
1008 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
1009 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
1010 RTX_FRAME_RELATED_P (expr) = 1;
1011 return expr;
1014 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
1015 new pattern in INSN1; INSN2 will be deleted by the caller. */
1017 static void
1018 mep_make_parallel (rtx insn1, rtx insn2)
1020 rtx expr;
/* Fold INSN2's frame-related effect into INSN1 so unwind information
   stays correct after the merge.  */
1022 if (RTX_FRAME_RELATED_P (insn2))
1024 expr = mep_frame_expr (insn2);
1025 if (RTX_FRAME_RELATED_P (insn1))
1026 expr = gen_rtx_SEQUENCE (VOIDmode,
1027 gen_rtvec (2, mep_frame_expr (insn1), expr));
1028 set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
1029 RTX_FRAME_RELATED_P (insn1) = 1;
1032 PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
1033 gen_rtvec (2, PATTERN (insn1),
1034 PATTERN (insn2)));
/* Force re-recognition of the combined insn.  */
1035 INSN_CODE (insn1) = -1;
1038 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
1039 the basic block to see if any previous load or store instruction can
1040 be persuaded to do SET_INSN as a side-effect. Return true if so. */
1042 static bool
1043 mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
1045 rtx insn;
1047 insn = set_insn;
/* Scan backwards within the basic block for a load/store through REG
   that can absorb the add as a post-modify.  */
1050 insn = PREV_INSN (insn);
1051 if (INSN_P (insn))
1053 if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
1055 mep_make_parallel (insn, set_insn);
1056 return true;
/* Any other set or use of REG, or a volatile insn, blocks the
   transformation.  */
1059 if (reg_set_p (reg, insn)
1060 || reg_referenced_p (reg, PATTERN (insn))
1061 || volatile_insn_p (PATTERN (insn)))
1062 return false;
/* Stop at the head of the basic block.  */
1065 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
1066 return false;
1069 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
1071 bool
1072 mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
1074 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
1075 extract_insn (insn);
1076 return result;
1079 bool
1080 mep_allow_clip (rtx ux, rtx lx, int s)
1082 HOST_WIDE_INT u = INTVAL (ux);
1083 HOST_WIDE_INT l = INTVAL (lx);
1084 int i;
1086 if (!TARGET_OPT_CLIP)
1087 return false;
1089 if (s)
1091 for (i = 0; i < 30; i ++)
1092 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
1093 && (l == - ((HOST_WIDE_INT) 1 << i)))
1094 return true;
1096 else
1098 if (l != 0)
1099 return false;
1101 for (i = 0; i < 30; i ++)
1102 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
1103 return true;
1105 return false;
1108 bool
1109 mep_bit_position_p (rtx x, bool looking_for)
1111 if (GET_CODE (x) != CONST_INT)
1112 return false;
1113 switch ((int) INTVAL(x) & 0xff)
1115 case 0x01: case 0x02: case 0x04: case 0x08:
1116 case 0x10: case 0x20: case 0x40: case 0x80:
1117 return looking_for;
1118 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1119 case 0xef: case 0xdf: case 0xbf: case 0x7f:
1120 return !looking_for;
1122 return false;
/* Return true if moving SRC into DEST cannot be done in a single
   instruction: far-section symbols, symbol+offset sums whose offset
   is out of range, and symbolic moves into registers above r7 all
   need splitting.  */
1125 static bool
1126 move_needs_splitting (rtx dest, rtx src,
1127 enum machine_mode mode ATTRIBUTE_UNUSED)
1129 int s = mep_section_tag (src);
1131 while (1)
/* Strip CONST and MEM wrappers to reach the symbolic core; a
   non-symbolic source never needs splitting.  */
1133 if (GET_CODE (src) == CONST
1134 || GET_CODE (src) == MEM)
1135 src = XEXP (src, 0);
1136 else if (GET_CODE (src) == SYMBOL_REF
1137 || GET_CODE (src) == LABEL_REF
1138 || GET_CODE (src) == PLUS)
1139 break;
1140 else
1141 return false;
1143 if (s == 'f'
1144 || (GET_CODE (src) == PLUS
1145 && GET_CODE (XEXP (src, 1)) == CONST_INT
1146 && (INTVAL (XEXP (src, 1)) < -65536
1147 || INTVAL (XEXP (src, 1)) > 0xffffff))
1148 || (GET_CODE (dest) == REG
1149 && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
1150 return true;
1151 return false;
1154 bool
1155 mep_split_mov (rtx *operands, int symbolic)
1157 if (symbolic)
1159 if (move_needs_splitting (operands[0], operands[1], SImode))
1160 return true;
1161 return false;
1164 if (GET_CODE (operands[1]) != CONST_INT)
1165 return false;
1167 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1168 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1169 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1170 return false;
1172 if (((!reload_completed && !reload_in_progress)
1173 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1174 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1175 return false;
1177 return true;
1180 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1181 it to one specific value. So the insn chosen depends on whether
1182 the source and destination modes match. */
1184 bool
1185 mep_vliw_mode_match (rtx tgt)
1187 bool src_vliw = mep_vliw_function_p (cfun->decl);
1188 bool tgt_vliw = INTVAL (tgt);
1190 return src_vliw == tgt_vliw;
1193 /* Like the above, but also test for near/far mismatches. */
1195 bool
1196 mep_vliw_jmp_match (rtx tgt)
1198 bool src_vliw = mep_vliw_function_p (cfun->decl);
1199 bool tgt_vliw = INTVAL (tgt);
1201 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1202 return false;
1204 return src_vliw == tgt_vliw;
     /* Return true if insn X carries the SLOT_MULTI slot attribute.  */
1207 bool
1208 mep_multi_slot (rtx x)
1210 return get_attr_slot (x) == SLOT_MULTI;
1214 bool
1215 mep_legitimate_constant_p (rtx x)
1217 /* We can't convert symbol values to gp- or tp-rel values after
1218 reload, as reload might have used $gp or $tp for other
1219 purposes. */
1220 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1222 char e = mep_section_tag (x);
1223 return (e != 't' && e != 'b');
1225 return 1;
1228 /* Be careful not to use macros that need to be compiled one way for
1229 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
     /* Return true if X is a legitimate address for MODE.  Accepted
        forms, in order: %lo(sym)[reg] (only for accesses of at most 4
        bytes), [reg], [reg + simm16], [reg + gp/tp-rel unspec],
        bare symbols for calls (VOIDmode), and certain SImode/SFmode
        constants.  Far-section ('f') addresses are always rejected.  */
1231 bool
1232 mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
1234 int the_tag;
1236 #define DEBUG_LEGIT 0
1237 #if DEBUG_LEGIT
1238 fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
1239 debug_rtx (x);
1240 #endif
     /* %lo(sym)[reg].  */
1242 if (GET_CODE (x) == LO_SUM
1243 && GET_CODE (XEXP (x, 0)) == REG
1244 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1245 && CONSTANT_P (XEXP (x, 1)))
1247 if (GET_MODE_SIZE (mode) > 4)
1249 /* We will end up splitting this, and lo_sums are not
1250 offsettable for us. */
1251 #if DEBUG_LEGIT
1252 fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
1253 #endif
1254 return false;
1256 #if DEBUG_LEGIT
1257 fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
1258 #endif
1259 return true;
     /* Plain register indirect.  */
1262 if (GET_CODE (x) == REG
1263 && GEN_REG (REGNO (x), strict))
1265 #if DEBUG_LEGIT
1266 fprintf (stderr, " - yup, [reg]\n");
1267 #endif
1268 return true;
     /* Register plus 16-bit signed displacement.  */
1271 if (GET_CODE (x) == PLUS
1272 && GET_CODE (XEXP (x, 0)) == REG
1273 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1274 && const_in_range (XEXP (x, 1), -32768, 32767))
1276 #if DEBUG_LEGIT
1277 fprintf (stderr, " - yup, [reg+const]\n");
1278 #endif
1279 return true;
     /* Register plus a (possibly offset) gp-/tp-relative UNSPEC.  */
1282 if (GET_CODE (x) == PLUS
1283 && GET_CODE (XEXP (x, 0)) == REG
1284 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1285 && GET_CODE (XEXP (x, 1)) == CONST
1286 && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
1287 || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
1288 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
1289 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
1291 #if DEBUG_LEGIT
1292 fprintf (stderr, " - yup, [reg+unspec]\n");
1293 #endif
1294 return true;
1297 the_tag = mep_section_tag (x);
1299 if (the_tag == 'f')
1301 #if DEBUG_LEGIT
1302 fprintf (stderr, " - nope, [far]\n");
1303 #endif
1304 return false;
     /* VOIDmode means a call target: a bare symbol is fine.  */
1307 if (mode == VOIDmode
1308 && GET_CODE (x) == SYMBOL_REF)
1310 #if DEBUG_LEGIT
1311 fprintf (stderr, " - yup, call [symbol]\n");
1312 #endif
1313 return true;
     /* Constant addresses: either non-integers, or word-aligned
        integers in [0, 0xfffff].  */
1316 if ((mode == SImode || mode == SFmode)
1317 && CONSTANT_P (x)
1318 && LEGITIMATE_CONSTANT_P (x)
1319 && the_tag != 't' && the_tag != 'b')
1321 if (GET_CODE (x) != CONST_INT
1322 || (INTVAL (x) <= 0xfffff
1323 && INTVAL (x) >= 0
1324 && (INTVAL (x) % 4) == 0))
1326 #if DEBUG_LEGIT
1327 fprintf (stderr, " - yup, [const]\n");
1328 #endif
1329 return true;
1333 #if DEBUG_LEGIT
1334 fprintf (stderr, " - nope.\n");
1335 #endif
1336 return false;
     /* Implements LEGITIMIZE_RELOAD_ADDRESS: returns 1 when a reload has
        been pushed for *X, 0 to let the generic code handle it.  (The
        return-type line is not visible in this extract.)  */
1340 mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
1341 enum reload_type type,
1342 int ind_levels ATTRIBUTE_UNUSED)
1344 if (GET_CODE (*x) == PLUS
1345 && GET_CODE (XEXP (*x, 0)) == MEM
1346 && GET_CODE (XEXP (*x, 1)) == REG)
1348 /* GCC will by default copy the MEM into a REG, which results in
1349 an invalid address. For us, the best thing to do is move the
1350 whole expression to a REG. */
1351 push_reload (*x, NULL_RTX, x, NULL,
1352 GENERAL_REGS, mode, VOIDmode,
1353 0, 0, opnum, type);
1354 return 1;
1357 if (GET_CODE (*x) == PLUS
1358 && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
1359 && GET_CODE (XEXP (*x, 1)) == CONST_INT)
1361 char e = mep_section_tag (XEXP (*x, 0));
     /* gp-/tp-relative symbols ('t'/'b') are left alone.  */
1363 if (e != 't' && e != 'b')
1365 /* GCC thinks that (sym+const) is a valid address. Well,
1366 sometimes it is, this time it isn't. The best thing to
1367 do is reload the symbol to a register, since reg+int
1368 tends to work, and we can't just add the symbol and
1369 constant anyway. */
1370 push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
1371 GENERAL_REGS, mode, VOIDmode,
1372 0, 0, opnum, type);
1373 return 1;
1376 return 0;
     /* Return the length (2 or 4 — presumably bytes, matching the
        16/32-bit encodings; confirm against mep.md) of the core
        load/store INSN whose memory operand is SET operand OPN.  Short
        forms exist for plain register addressing, small scaled
        SP-relative offsets, and TP-relative accesses from $0..$7.  */
1380 mep_core_address_length (rtx insn, int opn)
1382 rtx set = single_set (insn);
1383 rtx mem = XEXP (set, opn);
1384 rtx other = XEXP (set, 1-opn);
1385 rtx addr = XEXP (mem, 0);
1387 if (register_operand (addr, Pmode))
1388 return 2;
1389 if (GET_CODE (addr) == PLUS)
1391 rtx addend = XEXP (addr, 1);
1393 gcc_assert (REG_P (XEXP (addr, 0)));
1395 switch (REGNO (XEXP (addr, 0)))
1397 case STACK_POINTER_REGNUM:
     /* SP + 7-bit word-aligned offset, word accesses only.  */
1398 if (GET_MODE_SIZE (GET_MODE (mem)) == 4
1399 && mep_imm7a4_operand (addend, VOIDmode))
1400 return 2;
1401 break;
1403 case 13: /* TP */
1404 gcc_assert (REG_P (other));
     /* The short TP forms only reach registers $0..$7.  */
1406 if (REGNO (other) >= 8)
1407 break;
1409 if (GET_CODE (addend) == CONST
1410 && GET_CODE (XEXP (addend, 0)) == UNSPEC
1411 && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
1412 return 2;
1414 if (GET_CODE (addend) == CONST_INT
1415 && INTVAL (addend) >= 0
1416 && INTVAL (addend) <= 127
1417 && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
1418 return 2;
1419 break;
1423 return 4;
     /* Likewise for coprocessor load/store INSNs: short form (2) when
        OPN is not a MEM at all, or when its address is a bare register
        or POST_INC; otherwise the long form (4).  Note that XEXP (mem,
        0) is fetched before the MEM check — harmless for rtx, but worth
        keeping in mind when editing.  */
1427 mep_cop_address_length (rtx insn, int opn)
1429 rtx set = single_set (insn);
1430 rtx mem = XEXP (set, opn);
1431 rtx addr = XEXP (mem, 0);
1433 if (GET_CODE (mem) != MEM)
1434 return 2;
1435 if (register_operand (addr, Pmode))
1436 return 2;
1437 if (GET_CODE (addr) == POST_INC)
1438 return 2;
1440 return 4;
1443 #define DEBUG_EXPAND_MOV 0
     /* Expand a mov pattern.  Return true when the move has been fully
        emitted here (the caller's pattern should be abandoned), false
        when the caller should emit the move itself.  Handles rewriting
        gp-/tp-relative ('t'/'b' section tags) and far ('f') references,
        and forcing control-register operands through a temporary.  */
1444 bool
1445 mep_expand_mov (rtx *operands, enum machine_mode mode)
1447 int i, t;
1448 int tag[2];
1449 rtx tpsym, tpoffs;
1450 int post_reload = 0;
1452 tag[0] = mep_section_tag (operands[0]);
1453 tag[1] = mep_section_tag (operands[1]);
     /* Before reload, avoid mem-to-mem moves by forcing the source into
        a register.  */
1455 if (!reload_in_progress
1456 && !reload_completed
1457 && GET_CODE (operands[0]) != REG
1458 && GET_CODE (operands[0]) != SUBREG
1459 && GET_CODE (operands[1]) != REG
1460 && GET_CODE (operands[1]) != SUBREG)
1461 operands[1] = copy_to_mode_reg (mode, operands[1]);
1463 #if DEBUG_EXPAND_MOV
1464 fprintf(stderr, "expand move %s %d\n", mode_name[mode],
1465 reload_in_progress || reload_completed);
1466 debug_rtx (operands[0]);
1467 debug_rtx (operands[1]);
1468 #endif
1470 if (mode == DImode || mode == DFmode)
1471 return false;
1473 if (reload_in_progress || reload_completed)
1475 rtx r;
1477 if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
1478 cfun->machine->reload_changes_tp = true;
     /* During/after reload, the gp-/tp-relative rewrites below are only
        safe if $gp/$tp still hold their incoming values.  */
1480 if (tag[0] == 't' || tag[1] == 't')
1482 r = has_hard_reg_initial_val (Pmode, GP_REGNO);
1483 if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
1484 post_reload = 1;
1486 if (tag[0] == 'b' || tag[1] == 'b')
1488 r = has_hard_reg_initial_val (Pmode, TP_REGNO);
1489 if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
1490 post_reload = 1;
1492 if (cfun->machine->reload_changes_tp == true)
1493 post_reload = 1;
1496 if (!post_reload)
1498 rtx n;
     /* Rewrite symbolic source operands in the 't'/'b' sections into
        ($gp or $tp) + UNSPEC form and emit the SET directly.  */
1499 if (symbol_p (operands[1]))
1501 t = mep_section_tag (operands[1]);
1502 if (t == 'b' || t == 't')
1505 if (GET_CODE (operands[1]) == SYMBOL_REF)
1507 tpsym = operands[1];
1508 n = gen_rtx_UNSPEC (mode,
1509 gen_rtvec (1, operands[1]),
1510 t == 'b' ? UNS_TPREL : UNS_GPREL);
1511 n = gen_rtx_CONST (mode, n);
1513 else if (GET_CODE (operands[1]) == CONST
1514 && GET_CODE (XEXP (operands[1], 0)) == PLUS
1515 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
1516 && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
1518 tpsym = XEXP (XEXP (operands[1], 0), 0);
1519 tpoffs = XEXP (XEXP (operands[1], 0), 1);
1520 n = gen_rtx_UNSPEC (mode,
1521 gen_rtvec (1, tpsym),
1522 t == 'b' ? UNS_TPREL : UNS_GPREL);
1523 n = gen_rtx_PLUS (mode, n, tpoffs);
1524 n = gen_rtx_CONST (mode, n);
1526 else if (GET_CODE (operands[1]) == CONST
1527 && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
1528 return false;
1529 else
1531 error ("unusual TP-relative address");
1532 return false;
1535 n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
1536 : mep_gp_rtx ()), n);
1537 n = emit_insn (gen_rtx_SET (mode, operands[0], n));
1538 #if DEBUG_EXPAND_MOV
1539 fprintf(stderr, "mep_expand_mov emitting ");
1540 debug_rtx(n);
1541 #endif
1542 return true;
     /* Likewise rewrite 't'/'b' MEM addresses in either operand.  */
1546 for (i=0; i < 2; i++)
1548 t = mep_section_tag (operands[i]);
1549 if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
1551 rtx sym, n, r;
1552 int u;
1554 sym = XEXP (operands[i], 0);
1555 if (GET_CODE (sym) == CONST
1556 && GET_CODE (XEXP (sym, 0)) == UNSPEC)
1557 sym = XVECEXP (XEXP (sym, 0), 0, 0);
1559 if (t == 'b')
1561 r = mep_tp_rtx ();
1562 u = UNS_TPREL;
1564 else
1566 r = mep_gp_rtx ();
1567 u = UNS_GPREL;
1570 n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
1571 n = gen_rtx_CONST (Pmode, n);
1572 n = gen_rtx_PLUS (Pmode, r, n);
1573 operands[i] = replace_equiv_address (operands[i], n);
     /* Moves between a control register and memory must go through a
        general register.  */
1578 if ((GET_CODE (operands[1]) != REG
1579 && MEP_CONTROL_REG (operands[0]))
1580 || (GET_CODE (operands[0]) != REG
1581 && MEP_CONTROL_REG (operands[1])))
1583 rtx temp;
1584 #if DEBUG_EXPAND_MOV
1585 fprintf (stderr, "cr-mem, forcing op1 to reg\n");
1586 #endif
1587 temp = gen_reg_rtx (mode);
1588 emit_move_insn (temp, operands[1]);
1589 operands[1] = temp;
     /* Stores to far symbols (or of unusual size) need the address in a
        register first.  */
1592 if (symbolref_p (operands[0])
1593 && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
1594 || (GET_MODE_SIZE (mode) != 4)))
1596 rtx temp;
1598 gcc_assert (!reload_in_progress && !reload_completed);
1600 temp = force_reg (Pmode, XEXP (operands[0], 0));
1601 operands[0] = replace_equiv_address (operands[0], temp);
1602 emit_move_insn (operands[0], operands[1]);
1603 return true;
1606 if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
1607 tag[1] = 0;
     /* Remaining symbolic loads are built with the top/bottom-half
        symbol pair.  */
1609 if (symbol_p (operands[1])
1610 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1612 emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
1613 emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
1614 return true;
1617 if (symbolref_p (operands[1])
1618 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1620 rtx temp;
1622 if (reload_in_progress || reload_completed)
1623 temp = operands[0];
1624 else
1625 temp = gen_reg_rtx (Pmode);
1627 emit_insn (gen_movsi_topsym_s (temp, operands[1]));
1628 emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
1629 emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
1630 return true;
1633 return false;
1636 /* Cases where the pattern can't be made to use at all. */
     /* Return true if a mov of OPERANDS is representable by the mov
        patterns: no HIGH sources, no far ('f') operands, no pre-reload
        'b'/'t' symbols, and at least one general register involved.  */
1638 bool
1639 mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1641 int i;
1643 #define DEBUG_MOV_OK 0
1644 #if DEBUG_MOV_OK
1645 fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
1646 mep_section_tag (operands[1]));
1647 debug_rtx (operands[0]);
1648 debug_rtx (operands[1]);
1649 #endif
1651 /* We want the movh patterns to get these. */
1652 if (GET_CODE (operands[1]) == HIGH)
1653 return false;
1655 /* We can't store a register to a far variable without using a
1656 scratch register to hold the address. Using far variables should
1657 be split by mep_emit_mov anyway. */
1658 if (mep_section_tag (operands[0]) == 'f'
1659 || mep_section_tag (operands[1]) == 'f')
1661 #if DEBUG_MOV_OK
1662 fprintf (stderr, " - no, f\n");
1663 #endif
1664 return false;
1666 i = mep_section_tag (operands[1]);
1667 if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
1668 /* These are supposed to be generated with adds of the appropriate
1669 register. During and after reload, however, we allow them to
1670 be accessed as normal symbols because adding a dependency on
1671 the base register now might cause problems. */
1673 #if DEBUG_MOV_OK
1674 fprintf (stderr, " - no, bt\n");
1675 #endif
1676 return false;
1679 /* The only moves we can allow involve at least one general
1680 register, so require it. */
1681 for (i = 0; i < 2; i ++)
1683 /* Allow subregs too, before reload. */
1684 rtx x = operands[i];
1686 if (GET_CODE (x) == SUBREG)
1687 x = XEXP (x, 0);
1688 if (GET_CODE (x) == REG
1689 && ! MEP_CONTROL_REG (x))
1691 #if DEBUG_MOV_OK
1692 fprintf (stderr, " - ok\n");
1693 #endif
1694 return true;
1697 #if DEBUG_MOV_OK
1698 fprintf (stderr, " - no, no gen reg\n");
1699 #endif
1700 return false;
1703 #define DEBUG_SPLIT_WIDE_MOVE 0
     /* Split a double-word move: for each of operands[0..1], compute the
        high and low single-word halves into operands[2..3] (high) and
        operands[4..5] (low), swapping the two pairs when needed to
        avoid clobbering an input before it is read.  */
1704 void
1705 mep_split_wide_move (rtx *operands, enum machine_mode mode)
1707 int i;
1709 #if DEBUG_SPLIT_WIDE_MOVE
1710 fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
1711 debug_rtx (operands[0]);
1712 debug_rtx (operands[1]);
1713 #endif
1715 for (i = 0; i <= 1; i++)
1717 rtx op = operands[i], hi, lo;
1719 switch (GET_CODE (op))
1721 case REG:
1723 unsigned int regno = REGNO (op);
     /* 64-bit coprocessor registers: the low 32 bits are a SImode
        reg; the high 32 bits are addressed as a ZERO_EXTRACT of the
        DImode reg.  */
1725 if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
1727 rtx i32;
1729 lo = gen_rtx_REG (SImode, regno);
1730 i32 = GEN_INT (32);
1731 hi = gen_rtx_ZERO_EXTRACT (SImode,
1732 gen_rtx_REG (DImode, regno),
1733 i32, i32);
1735 else
     /* Register pairs: which regno is the high half depends on
        endianness.  */
1737 hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
1738 lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
1741 break;
1743 case CONST_INT:
1744 case CONST_DOUBLE:
1745 case MEM:
1746 hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
1747 lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
1748 break;
1750 default:
1751 gcc_unreachable ();
1754 /* The high part of CR <- GPR moves must be done after the low part. */
1755 operands [i + 4] = lo;
1756 operands [i + 2] = hi;
     /* If the first (high) move would clobber a value still needed by
        the second, or a ZERO_EXTRACT is involved, emit low-then-high
        instead by swapping the two half-move pairs.  */
1759 if (reg_mentioned_p (operands[2], operands[5])
1760 || GET_CODE (operands[2]) == ZERO_EXTRACT
1761 || GET_CODE (operands[4]) == ZERO_EXTRACT)
1763 rtx tmp;
1765 /* Overlapping register pairs -- make sure we don't
1766 early-clobber ourselves. */
1767 tmp = operands[2];
1768 operands[2] = operands[4];
1769 operands[4] = tmp;
1770 tmp = operands[3];
1771 operands[3] = operands[5];
1772 operands[5] = tmp;
1775 #if DEBUG_SPLIT_WIDE_MOVE
1776 fprintf(stderr, "\033[34m");
1777 debug_rtx (operands[2]);
1778 debug_rtx (operands[3]);
1779 debug_rtx (operands[4]);
1780 debug_rtx (operands[5]);
1781 fprintf(stderr, "\033[0m");
1782 #endif
1785 /* Emit a setcc instruction in its entirity. */
1787 static bool
1788 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1790 rtx tmp;
1792 switch (code)
1794 case GT:
1795 case GTU:
1796 tmp = op1, op1 = op2, op2 = tmp;
1797 code = swap_condition (code);
1798 /* FALLTHRU */
1800 case LT:
1801 case LTU:
1802 op1 = force_reg (SImode, op1);
1803 emit_insn (gen_rtx_SET (VOIDmode, dest,
1804 gen_rtx_fmt_ee (code, SImode, op1, op2)));
1805 return true;
1807 case EQ:
1808 if (op2 != const0_rtx)
1809 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1810 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1811 return true;
1813 case NE:
1814 /* Branchful sequence:
1815 mov dest, 0 16-bit
1816 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1817 mov dest, 1 16-bit
1819 Branchless sequence:
1820 add3 tmp, op1, -op2 32-bit (or mov + sub)
1821 sltu3 tmp, tmp, 1 16-bit
1822 xor3 dest, tmp, 1 32-bit
1824 if (optimize_size && op2 != const0_rtx)
1825 return false;
1827 if (op2 != const0_rtx)
1828 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1830 op2 = gen_reg_rtx (SImode);
1831 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1833 emit_insn (gen_rtx_SET (VOIDmode, dest,
1834 gen_rtx_XOR (SImode, op2, const1_rtx)));
1835 return true;
1837 case LE:
1838 if (GET_CODE (op2) != CONST_INT
1839 || INTVAL (op2) == 0x7ffffff)
1840 return false;
1841 op2 = GEN_INT (INTVAL (op2) + 1);
1842 return mep_expand_setcc_1 (LT, dest, op1, op2);
1844 case LEU:
1845 if (GET_CODE (op2) != CONST_INT
1846 || INTVAL (op2) == -1)
1847 return false;
1848 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1849 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1851 case GE:
1852 if (GET_CODE (op2) != CONST_INT
1853 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1854 return false;
1855 op2 = GEN_INT (INTVAL (op2) - 1);
1856 return mep_expand_setcc_1 (GT, dest, op1, op2);
1858 case GEU:
1859 if (GET_CODE (op2) != CONST_INT
1860 || op2 == const0_rtx)
1861 return false;
1862 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1863 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1865 default:
1866 gcc_unreachable ();
1870 bool
1871 mep_expand_setcc (rtx *operands)
1873 rtx dest = operands[0];
1874 enum rtx_code code = GET_CODE (operands[1]);
1875 rtx op0 = operands[2];
1876 rtx op1 = operands[3];
1878 return mep_expand_setcc_1 (code, dest, op0, op1);
     /* Expand a conditional branch comparison (operands[0] is the
        comparison, operands[1]/[2] its arguments), reducing it to one
        of the directly-branchable forms, and return the rewritten
        comparison rtx.  Comparisons the hardware lacks are materialized
        into a temporary via mep_expand_setcc_1 and then compared
        against zero.  (The rtx return-type line is not visible in this
        extract.)  */
1882 mep_expand_cbranch (rtx *operands)
1884 enum rtx_code code = GET_CODE (operands[0]);
1885 rtx op0 = operands[1];
1886 rtx op1 = operands[2];
1887 rtx tmp;
1889 restart:
1890 switch (code)
1892 case LT:
1893 if (mep_imm4_operand (op1, SImode))
1894 break;
1896 tmp = gen_reg_rtx (SImode);
1897 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1898 code = NE;
1899 op0 = tmp;
1900 op1 = const0_rtx;
1901 break;
1903 case GE:
1904 if (mep_imm4_operand (op1, SImode))
1905 break;
1907 tmp = gen_reg_rtx (SImode);
1908 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
     /* a >= b  ==  !(a < b).  */
1910 code = EQ;
1911 op0 = tmp;
1912 op1 = const0_rtx;
1913 break;
1915 case EQ:
1916 case NE:
1917 if (! mep_reg_or_imm4_operand (op1, SImode))
1918 op1 = force_reg (SImode, op1);
1919 break;
1921 case LE:
1922 case GT:
     /* Prefer the constant rewrite a <= b  ==  a < b+1 (and a > b ==
        a >= b+1) when it cannot overflow.  */
1923 if (GET_CODE (op1) == CONST_INT
1924 && INTVAL (op1) != 0x7fffffff)
1926 op1 = GEN_INT (INTVAL (op1) + 1);
1927 code = (code == LE ? LT : GE);
1928 goto restart;
1931 tmp = gen_reg_rtx (SImode);
1932 gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));
     /* a <= b  ==  !(b < a).  */
1934 code = (code == LE ? EQ : NE);
1935 op0 = tmp;
1936 op1 = const0_rtx;
1937 break;
1939 case LTU:
     /* a <u 1  ==  a == 0.  */
1940 if (op1 == const1_rtx)
1942 code = EQ;
1943 op1 = const0_rtx;
1944 break;
1947 tmp = gen_reg_rtx (SImode);
1948 gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
1949 code = NE;
1950 op0 = tmp;
1951 op1 = const0_rtx;
1952 break;
1954 case LEU:
1955 tmp = gen_reg_rtx (SImode);
1956 if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
1957 code = NE;
1958 else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
1959 code = EQ;
1960 else
1961 gcc_unreachable ();
1962 op0 = tmp;
1963 op1 = const0_rtx;
1964 break;
1966 case GTU:
1967 tmp = gen_reg_rtx (SImode);
1968 gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
1969 || mep_expand_setcc_1 (LTU, tmp, op1, op0));
1970 code = NE;
1971 op0 = tmp;
1972 op1 = const0_rtx;
1973 break;
1975 case GEU:
1976 tmp = gen_reg_rtx (SImode);
1977 if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
1978 code = NE;
1979 else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
1980 code = EQ;
1981 else
1982 gcc_unreachable ();
1983 op0 = tmp;
1984 op1 = const0_rtx;
1985 break;
1987 default:
1988 gcc_unreachable ();
1991 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
1994 const char *
1995 mep_emit_cbranch (rtx *operands, int ne)
1997 if (GET_CODE (operands[1]) == REG)
1998 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1999 else if (INTVAL (operands[1]) == 0)
2000 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
2001 else
2002 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
2005 void
2006 mep_expand_call (rtx *operands, int returns_value)
2008 rtx addr = operands[returns_value];
2009 rtx tp = mep_tp_rtx ();
2010 rtx gp = mep_gp_rtx ();
2012 gcc_assert (GET_CODE (addr) == MEM);
2014 addr = XEXP (addr, 0);
2016 if (! mep_call_address_operand (addr, VOIDmode))
2017 addr = force_reg (SImode, addr);
2019 if (! operands[returns_value+2])
2020 operands[returns_value+2] = const0_rtx;
2022 if (returns_value)
2023 emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
2024 operands[3], tp, gp));
2025 else
2026 emit_call_insn (gen_call_internal (addr, operands[1],
2027 operands[2], tp, gp));
2030 /* Aliasing Support. */
2032 /* If X is a machine specific address (i.e. a symbol or label being
2033 referenced as a displacement from the GOT implemented using an
2034 UNSPEC), then return the base term. Otherwise return X. */
     /* (The rtx return-type line is not visible in this extract.)  */
2037 mep_find_base_term (rtx x)
2039 rtx base, term;
2040 int unspec;
2042 if (GET_CODE (x) != PLUS)
2043 return x;
2044 base = XEXP (x, 0);
2045 term = XEXP (x, 1);
     /* Only $tp- and $gp-based addresses carry a recoverable base.  */
2047 if (has_hard_reg_initial_val(Pmode, TP_REGNO)
2048 && base == mep_tp_rtx ())
2049 unspec = UNS_TPREL;
2050 else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
2051 && base == mep_gp_rtx ())
2052 unspec = UNS_GPREL;
2053 else
2054 return x;
2056 if (GET_CODE (term) != CONST)
2057 return x;
2058 term = XEXP (term, 0);
2060 if (GET_CODE (term) != UNSPEC
2061 || XINT (term, 1) != unspec)
2062 return x;
     /* The symbol wrapped by the UNSPEC is the base term.  */
2064 return XVECEXP (term, 0, 0);
2067 /* Reload Support. */
2069 /* Return true if the registers in CLASS cannot represent the change from
2070 modes FROM to TO. */
2072 bool
2073 mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
2074 enum reg_class regclass)
2076 if (from == to)
2077 return false;
2079 /* 64-bit COP regs must remain 64-bit COP regs. */
2080 if (TARGET_64BIT_CR_REGS
2081 && (regclass == CR_REGS
2082 || regclass == LOADABLE_CR_REGS)
2083 && (GET_MODE_SIZE (to) < 8
2084 || GET_MODE_SIZE (from) < 8))
2085 return true;
2087 return false;
2090 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
2092 static bool
2093 mep_general_reg (rtx x)
2095 while (GET_CODE (x) == SUBREG)
2096 x = XEXP (x, 0);
2097 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
2100 static bool
2101 mep_nongeneral_reg (rtx x)
2103 while (GET_CODE (x) == SUBREG)
2104 x = XEXP (x, 0);
2105 return (GET_CODE (x) == REG
2106 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2109 static bool
2110 mep_general_copro_reg (rtx x)
2112 while (GET_CODE (x) == SUBREG)
2113 x = XEXP (x, 0);
2114 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
2117 static bool
2118 mep_nonregister (rtx x)
2120 while (GET_CODE (x) == SUBREG)
2121 x = XEXP (x, 0);
2122 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2125 #define DEBUG_RELOAD 0
2127 /* Return the secondary reload class needed for moving value X to or
2128 from a register in coprocessor register class CLASS. */
2130 static enum reg_class
2131 mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
2133 if (mep_general_reg (x))
2134 /* We can do the move directly if mep_have_core_copro_moves_p,
2135 otherwise we need to go through memory. Either way, no secondary
2136 register is needed. */
2137 return NO_REGS;
2139 if (mep_general_copro_reg (x))
2141 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2142 if (mep_have_copro_copro_moves_p)
2143 return NO_REGS;
2145 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2146 if (mep_have_core_copro_moves_p)
2147 return GENERAL_REGS;
2149 /* Otherwise we need to do it through memory. No secondary
2150 register is needed. */
2151 return NO_REGS;
2154 if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
2155 && constraint_satisfied_p (x, CONSTRAINT_U))
2156 /* X is a memory value that we can access directly. */
2157 return NO_REGS;
2159 /* We have to move X into a GPR first and then copy it to
2160 the coprocessor register. The move from the GPR to the
2161 coprocessor might be done directly or through memory,
2162 depending on mep_have_core_copro_moves_p. */
2163 return GENERAL_REGS;
2166 /* Copying X to register in RCLASS. */
     /* Returns the required secondary reload class (NO_REGS when none).
        (The enum reg_class return-type line is not visible in this
        extract.)  */
2169 mep_secondary_input_reload_class (enum reg_class rclass,
2170 enum machine_mode mode ATTRIBUTE_UNUSED,
2171 rtx x)
2173 int rv = NO_REGS;
2175 #if DEBUG_RELOAD
2176 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2177 debug_rtx (x);
2178 #endif
     /* Coprocessor destinations have their own rules; any other
        non-general destination needs a GPR intermediary unless X is
        already a general register.  */
2180 if (reg_class_subset_p (rclass, CR_REGS))
2181 rv = mep_secondary_copro_reload_class (rclass, x);
2182 else if (MEP_NONGENERAL_CLASS (rclass)
2183 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2184 rv = GENERAL_REGS;
2186 #if DEBUG_RELOAD
2187 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2188 #endif
2189 return rv;
2192 /* Copying register in RCLASS to X. */
     /* Mirror of mep_secondary_input_reload_class for the output
        direction.  (The enum reg_class return-type line is not visible
        in this extract.)  */
2195 mep_secondary_output_reload_class (enum reg_class rclass,
2196 enum machine_mode mode ATTRIBUTE_UNUSED,
2197 rtx x)
2199 int rv = NO_REGS;
2201 #if DEBUG_RELOAD
2202 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2203 debug_rtx (x);
2204 #endif
2206 if (reg_class_subset_p (rclass, CR_REGS))
2207 rv = mep_secondary_copro_reload_class (rclass, x);
2208 else if (MEP_NONGENERAL_CLASS (rclass)
2209 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2210 rv = GENERAL_REGS;
2212 #if DEBUG_RELOAD
2213 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2214 #endif
2216 return rv;
2219 /* Implement SECONDARY_MEMORY_NEEDED. */
     /* True when a move between RCLASS1 and RCLASS2 must go through
        memory.  NOTE(review): the brace nesting below was lost in this
        extract — in upstream mep.c all three tests sit inside the
        !mep_have_core_copro_moves_p block; confirm against the
        original file before editing.  */
2221 bool
2222 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2223 enum machine_mode mode ATTRIBUTE_UNUSED)
2225 if (!mep_have_core_copro_moves_p)
2227 if (reg_classes_intersect_p (rclass1, CR_REGS)
2228 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2229 return true;
2230 if (reg_classes_intersect_p (rclass2, CR_REGS)
2231 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2232 return true;
2233 if (!mep_have_copro_copro_moves_p
2234 && reg_classes_intersect_p (rclass1, CR_REGS)
2235 && reg_classes_intersect_p (rclass2, CR_REGS))
2236 return true;
2238 return false;
     /* Emit the insns for a secondary reload.  operands[0]/[1] are the
        destination/source, operands[2] the scratch register.  */
2241 void
2242 mep_expand_reload (rtx *operands, enum machine_mode mode)
2244 /* There are three cases for each direction:
2245 register, farsym
2246 control, farsym
2247 control, nearsym */
2249 int s0 = mep_section_tag (operands[0]) == 'f';
2250 int s1 = mep_section_tag (operands[1]) == 'f';
2251 int c0 = mep_nongeneral_reg (operands[0]);
2252 int c1 = mep_nongeneral_reg (operands[1]);
     /* WHICH is a two-decimal-digit code: tens digit describes the
        destination (2 = far, 1 = control reg), units digit the source.  */
2253 int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);
2255 #if DEBUG_RELOAD
2256 fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
2257 debug_rtx (operands[0]);
2258 debug_rtx (operands[1]);
2259 #endif
2261 switch (which)
     /* NB: the labels 00, 01 and 02 below are octal literals, but their
        values happen to equal the intended decimal codes, so the switch
        behaves as written.  */
2263 case 00: /* Don't know why this gets here. */
2264 case 02: /* general = far */
2265 emit_move_insn (operands[0], operands[1]);
2266 return;
2268 case 10: /* cr = mem */
2269 case 11: /* cr = cr */
2270 case 01: /* mem = cr */
2271 case 12: /* cr = far */
     /* Stage the value through the general-register scratch.  */
2272 emit_move_insn (operands[2], operands[1]);
2273 emit_move_insn (operands[0], operands[2]);
2274 return;
2276 case 20: /* far = general */
     /* Load the far address into the scratch, then store through it.  */
2277 emit_move_insn (operands[2], XEXP (operands[1], 0));
2278 emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
2279 return;
2281 case 21: /* far = cr */
2282 case 22: /* far = far */
2283 default:
2284 fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
2285 which, mode_name[mode]);
2286 debug_rtx (operands[0]);
2287 debug_rtx (operands[1]);
2288 gcc_unreachable ();
2292 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2293 can be moved directly into registers 0 to 7, but not into the rest.
2294 If so, and if the required class includes registers 0 to 7, restrict
2295 it to those registers. */
2297 enum reg_class
2298 mep_preferred_reload_class (rtx x, enum reg_class rclass)
2300 switch (GET_CODE (x))
2302 case CONST_INT:
2303 if (INTVAL (x) >= 0x10000
2304 && INTVAL (x) < 0x01000000
2305 && (INTVAL (x) & 0xffff) != 0
2306 && reg_class_subset_p (TPREL_REGS, rclass))
2307 rclass = TPREL_REGS;
2308 break;
2310 case CONST:
2311 case SYMBOL_REF:
2312 case LABEL_REF:
2313 if (mep_section_tag (x) != 'f'
2314 && reg_class_subset_p (TPREL_REGS, rclass))
2315 rclass = TPREL_REGS;
2316 break;
2318 default:
2319 break;
2321 return rclass;
2324 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2325 moves, 4 for direct double-register moves, and 1000 for anything
2326 that requires a temporary register or temporary stack slot. */
     /* (The int return-type line is not visible in this extract.)  */
2329 mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
     /* CR <-> CR with direct copro-copro moves available: cheapest.  */
2331 if (mep_have_copro_copro_moves_p
2332 && reg_class_subset_p (from, CR_REGS)
2333 && reg_class_subset_p (to, CR_REGS))
2335 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2336 return 4;
2337 return 2;
     /* CR <-> CR without them: twice the cost.  */
2339 if (reg_class_subset_p (from, CR_REGS)
2340 && reg_class_subset_p (to, CR_REGS))
2342 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2343 return 8;
2344 return 4;
     /* CR <-> anything else.  */
2346 if (reg_class_subset_p (from, CR_REGS)
2347 || reg_class_subset_p (to, CR_REGS))
2349 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2350 return 4;
2351 return 2;
     /* Moves needing memory, or between two non-general classes, are
        prohibitively expensive.  */
2353 if (mep_secondary_memory_needed (from, to, mode))
2354 return 1000;
2355 if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
2356 return 1000;
2358 if (GET_MODE_SIZE (mode) > 4)
2359 return 4;
2361 return 2;
2365 /* Functions to save and restore machine-specific function data. */
2367 static struct machine_function *
2368 mep_init_machine_status (void)
2370 struct machine_function *f;
2372 f = (struct machine_function *) ggc_alloc_cleared (sizeof (struct machine_function));
2374 return f;
2377 static rtx
2378 mep_allocate_initial_value (rtx reg)
2380 int rss;
2382 if (GET_CODE (reg) != REG)
2383 return NULL_RTX;
2385 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2386 return NULL_RTX;
2388 /* In interrupt functions, the "initial" values of $gp and $tp are
2389 provided by the prologue. They are not necessarily the same as
2390 the values that the caller was using. */
2391 if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
2392 if (mep_interrupt_p ())
2393 return NULL_RTX;
2395 if (! cfun->machine->reg_save_slot[REGNO(reg)])
2397 cfun->machine->reg_save_size += 4;
2398 cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
2401 rss = cfun->machine->reg_save_slot[REGNO(reg)];
2402 return gen_rtx_MEM (SImode, plus_constant (arg_pointer_rtx, -rss));
     /* Return the RTX for the return address COUNT frames up; only the
        current frame (COUNT == 0, register $lp) is supported.  (The rtx
        return-type line is not visible in this extract.)  */
2406 mep_return_addr_rtx (int count)
2408 if (count != 0)
2409 return const0_rtx;
2411 return get_hard_reg_initial_val (Pmode, LP_REGNO);
     /* Return a pseudo holding $tp's value on entry to the function.  */
2414 static rtx
2415 mep_tp_rtx (void)
2417 return get_hard_reg_initial_val (Pmode, TP_REGNO);
     /* Return a pseudo holding $gp's value on entry to the function.  */
2420 static rtx
2421 mep_gp_rtx (void)
2423 return get_hard_reg_initial_val (Pmode, GP_REGNO);
/* True if the current function carries the "interrupt" attribute.
   The answer is cached in cfun->machine->interrupt_handler:
   0 = not yet computed, 1 = no, 2 = yes.  */
static bool
mep_interrupt_p (void)
{
  if (cfun->machine->interrupt_handler == 0)
    {
      int interrupt_handler
	= (lookup_attribute ("interrupt",
			     DECL_ATTRIBUTES (current_function_decl))
	   != NULL_TREE);
      cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
    }
  return cfun->machine->interrupt_handler == 2;
}
/* True if the current function carries the "disinterrupt" attribute.
   Cached in cfun->machine->disable_interrupts with the same encoding
   as mep_interrupt_p: 0 = unknown, 1 = no, 2 = yes.  */
static bool
mep_disinterrupt_p (void)
{
  if (cfun->machine->disable_interrupts == 0)
    {
      int disable_interrupts
	= (lookup_attribute ("disinterrupt",
			     DECL_ATTRIBUTES (current_function_decl))
	   != NULL_TREE);
      cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
    }
  return cfun->machine->disable_interrupts == 2;
}
2455 /* Frame/Epilog/Prolog Related. */
/* Return true if INSN (an insn or a bare pattern) sets REG.  */
static bool
mep_reg_set_p (rtx reg, rtx insn)
{
  /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
  if (INSN_P (insn))
    {
      if (FIND_REG_INC_NOTE (insn, reg))
	return true;
      insn = PATTERN (insn);
    }

  /* A register copied onto itself is not a real set.  */
  if (GET_CODE (insn) == SET
      && GET_CODE (XEXP (insn, 0)) == REG
      && GET_CODE (XEXP (insn, 1)) == REG
      && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
    return false;

  return set_of (reg, insn) != NULL_RTX;
}
/* Tri-state cache values for mep_call_saves_register, stored per
   register in cfun->machine->reg_saved[].  */
#define MEP_SAVES_UNKNOWN 0
#define MEP_SAVES_YES 1
#define MEP_SAVES_MAYBE 2
#define MEP_SAVES_NO 3
/* Return true if hard register REGNO is (or must be assumed to be)
   set somewhere in the current function's insn stream.  */
static bool
mep_reg_set_in_function (int regno)
{
  rtx reg, insn;

  /* In an interrupt handler, a live register counts as set.  */
  if (mep_interrupt_p () && df_regs_ever_live_p(regno))
    return true;

  /* Profiling code implicitly clobbers $lp.  */
  if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
    return true;

  push_topmost_sequence ();
  insn = get_insns ();
  pop_topmost_sequence ();

  if (!insn)
    return false;

  reg = gen_rtx_REG (SImode, regno);

  /* Scan every insn after the first for a set of REG.  */
  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && mep_reg_set_p (reg, insn))
      return true;
  return false;
}
/* True if the function contains a basic asm with no operands
   (ASM_INPUT pattern).  Such asms could touch any register, so the
   callers must be conservative.  Cached in
   cfun->machine->asms_without_operands: 0 = unknown, 1 = no, 2 = yes.  */
static bool
mep_asm_without_operands_p (void)
{
  if (cfun->machine->asms_without_operands == 0)
    {
      rtx insn;

      push_topmost_sequence ();
      insn = get_insns ();
      pop_topmost_sequence ();

      cfun->machine->asms_without_operands = 1;
      while (insn)
	{
	  if (INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == ASM_INPUT)
	    {
	      cfun->machine->asms_without_operands = 2;
	      break;
	    }
	  insn = NEXT_INSN (insn);
	}
    }
  return cfun->machine->asms_without_operands == 2;
}
2536 /* Interrupt functions save/restore every call-preserved register, and
2537 any call-used register it uses (or all if it calls any function,
2538 since they may get clobbered there too). Here we check to see
2539 which call-used registers need saving. */
/* On IVC2, these coprocessor control registers additionally need
   saving in interrupt handlers (see mep_interrupt_saved_reg).  */
#define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
			    && (r == FIRST_CCR_REGNO + 1 \
				|| (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
				|| (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
/* Return true if register R must be saved/restored by an interrupt
   handler's prologue/epilogue.  Non-interrupt functions never need
   this.  */
static bool
mep_interrupt_saved_reg (int r)
{
  if (!mep_interrupt_p ())
    return false;
  /* The temp register(s) used by the save/restore code itself.  */
  if (r == REGSAVE_CONTROL_TEMP
      || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
    return true;
  /* A bare asm may touch anything; be conservative.  */
  if (mep_asm_without_operands_p ()
      && (!fixed_regs[r]
	  || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
	  || IVC2_ISAVED_REG (r)))
    return true;
  if (!current_function_is_leaf)
    /* Function calls mean we need to save $lp.  */
    if (r == LP_REGNO || IVC2_ISAVED_REG (r))
      return true;
  if (!current_function_is_leaf || cfun->machine->doloop_tags > 0)
    /* The interrupt handler might use these registers for repeat blocks,
       or it might call a function that does so.  */
    if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
      return true;
  if (current_function_is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
    return false;
  /* Functions we call might clobber these.  */
  if (call_used_regs[r] && !fixed_regs[r])
    return true;
  /* Additional registers that need to be saved for IVC2.  */
  if (IVC2_ISAVED_REG (r))
    return true;

  return false;
}
/* Return true if this function must save register R.  The decision is
   cached in cfun->machine->reg_saved[] (MEP_SAVES_* values); once the
   frame layout is locked the cache is authoritative.  */
static bool
mep_call_saves_register (int r)
{
  if (! cfun->machine->frame_locked)
    {
      int rv = MEP_SAVES_NO;

      if (cfun->machine->reg_save_slot[r])
	rv = MEP_SAVES_YES;
      else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
	/* Profiling code clobbers $lp; see mep_epilogue_uses.  */
	rv = MEP_SAVES_YES;
      else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
	rv = MEP_SAVES_YES;
      else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
	rv = MEP_SAVES_YES;
      else if (crtl->calls_eh_return && (r == 10 || r == 11))
	/* We need these to have stack slots so that they can be set during
	   unwinding.  */
	rv = MEP_SAVES_YES;
      else if (mep_interrupt_saved_reg (r))
	rv = MEP_SAVES_YES;
      cfun->machine->reg_saved[r] = rv;
    }
  return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
}
2606 /* Return true if epilogue uses register REGNO. */
/* Return true if epilogue uses register REGNO.  */
bool
mep_epilogue_uses (int regno)
{
  /* Since $lp is a call-saved register, the generic code will normally
     mark it used in the epilogue if it needs to be saved and restored.
     However, when profiling is enabled, the profiling code will implicitly
     clobber $11.  This case has to be handled specially both here and in
     mep_call_saves_register.  */
  if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
    return true;
  /* Interrupt functions save/restore pretty much everything.  */
  return (reload_completed && mep_interrupt_saved_reg (regno));
}
2622 static int
2623 mep_reg_size (int regno)
2625 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2626 return 8;
2627 return 4;
2630 /* Worker function for TARGET_CAN_ELIMINATE. */
2632 bool
2633 mep_can_eliminate (const int from, const int to)
2635 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2636 ? ! frame_pointer_needed
2637 : true);
/* Compute the offset between register FROM and register TO for
   register elimination.  As a side effect this recomputes the
   alignment fillers (regsave_filler, frame_filler) cached in
   cfun->machine, and — while the frame is not locked — resets the
   reg_saved cache so mep_call_saves_register re-evaluates.  */
int
mep_elimination_offset (int from, int to)
{
  int reg_save_size;
  int i;
  int frame_size = get_frame_size () + crtl->outgoing_args_size;
  int total_size;

  if (!cfun->machine->frame_locked)
    memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));

  /* We don't count arg_regs_to_save in the arg pointer offset, because
     gcc thinks the arg pointer has moved along with the saved regs.
     However, we do count it when we adjust $sp in the prologue.  */
  reg_save_size = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register (i))
      reg_save_size += mep_reg_size (i);

  /* Pad the register-save area out to an 8-byte multiple.  */
  if (reg_save_size % 8)
    cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
  else
    cfun->machine->regsave_filler = 0;

  /* This is what our total stack adjustment looks like.  */
  total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);

  /* And pad the whole frame the same way.  */
  if (total_size % 8)
    cfun->machine->frame_filler = 8 - (total_size % 8);
  else
    cfun->machine->frame_filler = 0;

  if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler;

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return cfun->machine->frame_filler + frame_size;

  if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;

  gcc_unreachable ();
}
/* Mark X as frame-related (used for unwind/CFI bookkeeping on
   prologue insns) and return it, for convenient chaining.  */
static rtx
F (rtx x)
{
  RTX_FRAME_RELATED_P (x) = 1;
  return x;
}
2692 /* Since the prologue/epilogue code is generated after optimization,
2693 we can't rely on gcc to split constants for us. So, this code
2694 captures all the ways to add a constant to a register in one logic
2695 chunk, including optimizing away insns we just don't need. This
2696 makes the prolog/epilog code easier to follow. */
/* Since the prologue/epilogue code is generated after optimization,
   we can't rely on gcc to split constants for us.  So, this code
   captures all the ways to add a constant to a register in one logic
   chunk, including optimizing away insns we just don't need.  This
   makes the prolog/epilog code easier to follow.

   Emits DEST = SRC + VALUE (SImode hard registers given by number);
   when MARK_FRAME is set the insns are flagged frame-related.  */
static void
add_constant (int dest, int src, int value, int mark_frame)
{
  rtx insn;
  int hi, lo;

  /* Adding zero to itself: nothing to emit.  */
  if (src == dest && value == 0)
    return;

  /* Plain register copy.  */
  if (value == 0)
    {
      insn = emit_move_insn (gen_rtx_REG (SImode, dest),
			     gen_rtx_REG (SImode, src));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* Constant fits in a 16-bit immediate add.  */
  if (value >= -32768 && value <= 32767)
    {
      insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				    gen_rtx_REG (SImode, src),
				    GEN_INT (value)));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* Big constant, need to use a temp register.  We use
     REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
     area is always small enough to directly add to).  */

  hi = trunc_int_for_mode (value & 0xffff0000, SImode);
  lo = value & 0xffff;

  insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			 GEN_INT (hi));

  if (lo)
    {
      insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    GEN_INT (lo)));
    }

  insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				gen_rtx_REG (SImode, src),
				gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
  if (mark_frame)
    {
      RTX_FRAME_RELATED_P(insn) = 1;
      /* Describe the net effect for the unwinder, since the temp-reg
	 sequence itself isn't expressible as a simple CFI rule.  */
      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		    gen_rtx_SET (SImode,
				 gen_rtx_REG (SImode, dest),
				 gen_rtx_PLUS (SImode,
					       gen_rtx_REG (SImode, dest),
					       GEN_INT (value))));
    }
}
/* Return true if the current function's insns mention the stack
   pointer anywhere.  The empty-bodied for loop walks to the outermost
   entry on the emit sequence stack so the full stream is scanned.  */
static bool
mep_function_uses_sp (void)
{
  rtx insn;
  struct sequence_stack *seq;
  rtx sp = gen_rtx_REG (SImode, SP_REGNO);

  insn = get_insns ();
  for (seq = crtl->emit.sequence_stack;
       seq;
       insn = seq->first, seq = seq->next);

  while (insn)
    {
      if (mep_mentioned_p (insn, sp, 0))
	return true;
      insn = NEXT_INSN (insn);
    }
  return false;
}
2778 /* Move SRC to DEST. Mark the move as being potentially dead if
2779 MAYBE_DEAD_P. */
/* Move SRC to DEST.  Mark the move as being potentially dead if
   MAYBE_DEAD_P.  (The REG_MAYBE_DEAD note emission is currently
   compiled out; the flag is kept for the callers' documentation
   value.)  */
static rtx
maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
{
  rtx insn = emit_move_insn (dest, src);
#if 0
  if (maybe_dead_p)
    REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
#endif
  return insn;
}
2792 /* Used for interrupt functions, which can't assume that $tp and $gp
2793 contain the correct pointers. */
/* Used for interrupt functions, which can't assume that $tp and $gp
   contain the correct pointers.  Reload hard register REGNO from the
   link-time symbol SYMBOL using the hi/lo symbol move patterns.  */
static void
mep_reload_pointer (int regno, const char *symbol)
{
  rtx reg, sym;

  /* Skip the reload when the register is provably unused.  */
  if (!df_regs_ever_live_p(regno) && current_function_is_leaf)
    return;

  reg = gen_rtx_REG (SImode, regno);
  sym = gen_rtx_SYMBOL_REF (SImode, symbol);
  emit_insn (gen_movsi_topsym_s (reg, sym));
  emit_insn (gen_movsi_botsym_s (reg, reg, sym));
}
2809 /* Assign save slots for any register not already saved. DImode
2810 registers go at the end of the reg save area; the rest go at the
2811 beginning. This is for alignment purposes. Returns true if a frame
2812 is really needed. */
/* Assign save slots for any register not already saved.  DImode
   registers go at the end of the reg save area; the rest go at the
   beginning.  This is for alignment purposes.  Locks the frame layout
   as a side effect.  Returns true if a frame is really needed.  */
static bool
mep_assign_save_slots (int reg_save_size)
{
  bool really_need_stack_frame = false;
  int di_ofs = 0;
  int i;

  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
	int regsize = mep_reg_size (i);

	/* $tp/$gp/$lp saved purely for their entry values don't by
	   themselves force a frame.  */
	if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
	    || mep_reg_set_in_function (i))
	  really_need_stack_frame = true;

	if (cfun->machine->reg_save_slot[i])
	  continue;

	if (regsize < 8)
	  {
	    cfun->machine->reg_save_size += regsize;
	    cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
	  }
	else
	  {
	    /* 8-byte registers fill from the top of the area down.  */
	    cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
	    di_ofs += 8;
	  }
      }
  cfun->machine->frame_locked = 1;
  return really_need_stack_frame;
}
2847 void
2848 mep_expand_prologue (void)
2850 int i, rss, sp_offset = 0;
2851 int reg_save_size;
2852 int frame_size;
2853 int really_need_stack_frame = frame_size;
2855 /* We must not allow register renaming in interrupt functions,
2856 because that invalidates the correctness of the set of call-used
2857 registers we're going to save/restore. */
2858 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2860 if (mep_disinterrupt_p ())
2861 emit_insn (gen_mep_disable_int ());
2863 cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;
2865 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2866 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2868 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
2870 sp_offset = reg_save_size;
2871 if (sp_offset + frame_size < 128)
2872 sp_offset += frame_size ;
2874 add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);
2876 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2877 if (mep_call_saves_register(i))
2879 rtx mem;
2880 bool maybe_dead_p;
2881 enum machine_mode rmode;
2883 rss = cfun->machine->reg_save_slot[i];
2885 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2886 && (!mep_reg_set_in_function (i)
2887 && !mep_interrupt_p ()))
2888 continue;
2890 if (mep_reg_size (i) == 8)
2891 rmode = DImode;
2892 else
2893 rmode = SImode;
2895 /* If there is a pseudo associated with this register's initial value,
2896 reload might have already spilt it to the stack slot suggested by
2897 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2898 deleted as dead. */
2899 mem = gen_rtx_MEM (rmode,
2900 plus_constant (stack_pointer_rtx, sp_offset - rss));
2901 maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));
2903 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2904 F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
2905 else if (rmode == DImode)
2907 rtx insn;
2908 int be = TARGET_BIG_ENDIAN ? 4 : 0;
2910 mem = gen_rtx_MEM (SImode,
2911 plus_constant (stack_pointer_rtx, sp_offset - rss + be));
2913 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2914 gen_rtx_REG (SImode, i),
2915 maybe_dead_p);
2916 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2917 gen_rtx_ZERO_EXTRACT (SImode,
2918 gen_rtx_REG (DImode, i),
2919 GEN_INT (32),
2920 GEN_INT (32)),
2921 maybe_dead_p);
2922 insn = maybe_dead_move (mem,
2923 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2924 maybe_dead_p);
2925 RTX_FRAME_RELATED_P (insn) = 1;
2927 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2928 gen_rtx_SET (VOIDmode,
2929 copy_rtx (mem),
2930 gen_rtx_REG (rmode, i)));
2931 mem = gen_rtx_MEM (SImode,
2932 plus_constant (stack_pointer_rtx, sp_offset - rss + (4-be)));
2933 insn = maybe_dead_move (mem,
2934 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2935 maybe_dead_p);
2937 else
2939 rtx insn;
2940 maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2941 gen_rtx_REG (rmode, i),
2942 maybe_dead_p);
2943 insn = maybe_dead_move (mem,
2944 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2945 maybe_dead_p);
2946 RTX_FRAME_RELATED_P (insn) = 1;
2948 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2949 gen_rtx_SET (VOIDmode,
2950 copy_rtx (mem),
2951 gen_rtx_REG (rmode, i)));
2955 if (frame_pointer_needed)
2957 /* We've already adjusted down by sp_offset. Total $sp change
2958 is reg_save_size + frame_size. We want a net change here of
2959 just reg_save_size. */
2960 add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
2963 add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);
2965 if (mep_interrupt_p ())
2967 mep_reload_pointer(GP_REGNO, "__sdabase");
2968 mep_reload_pointer(TP_REGNO, "__tpbase");
/* Emit a human-readable frame-layout commentary at the top of the
   function's assembly output, and pick the textual name for the FP
   register.  LOCAL is the local-variable size in bytes.  Purely
   informational; emits nothing when the frame is empty.  */
static void
mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
{
  int local = hwi_local;
  int frame_size = local + crtl->outgoing_args_size;
  int reg_save_size;
  int ffill;
  int i, sp, skip;
  int sp_offset;
  int slot_map[FIRST_PSEUDO_REGISTER], si, sj;

  /* NOTE(review): frame_size's initializer above is immediately
     overwritten here — the initializer appears redundant; confirm.  */
  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  sp_offset = reg_save_size + frame_size;

  ffill = cfun->machine->frame_filler;

  if (cfun->machine->mep_frame_pointer_needed)
    reg_names[FP_REGNO] = "$fp";
  else
    reg_names[FP_REGNO] = "$8";

  if (sp_offset == 0)
    return;

  /* Without debug info, a one-line summary suffices.  */
  if (debug_info_level == DINFO_LEVEL_NONE)
    {
      fprintf (file, "\t# frame: %d", sp_offset);
      if (reg_save_size)
	fprintf (file, " %d regs", reg_save_size);
      if (local)
	fprintf (file, " %d locals", local);
      if (crtl->outgoing_args_size)
	fprintf (file, " %d args", crtl->outgoing_args_size);
      fprintf (file, "\n");
      return;
    }

  fprintf (file, "\t#\n");
  fprintf (file, "\t# Initial Frame Information:\n");
  if (sp_offset || !frame_pointer_needed)
    fprintf (file, "\t# Entry ---------- 0\n");

  /* Sort registers by save slots, so they're printed in the order
     they appear in memory, not the order they're saved in.  */
  for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
    slot_map[si] = si;
  for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
    for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
      if (cfun->machine->reg_save_slot[slot_map[si]]
	  > cfun->machine->reg_save_slot[slot_map[sj]])
	{
	  int t = slot_map[si];
	  slot_map[si] = slot_map[sj];
	  slot_map[sj] = t;
	}

  sp = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int rsize;
      int r = slot_map[i];
      int rss = cfun->machine->reg_save_slot[r];

      if (!mep_call_saves_register (r))
	continue;

      /* Skip registers that kept their entry values (mirrors the
	 prologue's store-skipping logic).  */
      if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
	  && (!mep_reg_set_in_function (r)
	      && !mep_interrupt_p ()))
	continue;

      rsize = mep_reg_size(r);
      skip = rss - (sp+rsize);
      if (skip)
	fprintf (file, "\t# %3d bytes for alignment\n", skip);
      fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
	       rsize, reg_names[r], sp_offset - rss);
      sp = rss;
    }

  skip = reg_save_size - sp;
  if (skip)
    fprintf (file, "\t# %3d bytes for alignment\n", skip);

  if (frame_pointer_needed)
    fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
  if (local)
    fprintf (file, "\t# %3d bytes for local vars\n", local);
  if (ffill)
    fprintf (file, "\t# %3d bytes for alignment\n", ffill);
  if (crtl->outgoing_args_size)
    fprintf (file, "\t# %3d bytes for outgoing args\n",
	     crtl->outgoing_args_size);
  fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
  fprintf (file, "\t#\n");
}
/* Non-zero while expanding an eh_return epilogue, where the $lp
   restore must be skipped (see mep_emit_eh_epilogue).  */
static int mep_prevent_lp_restore = 0;
/* Non-zero while expanding a sibcall epilogue, which must not emit a
   return of its own (see mep_expand_sibcall_epilogue).  */
static int mep_sibcall_epilogue = 0;
/* Expand the current function's epilogue: rebase/restore $sp, reload
   every saved register, and emit the appropriate return (indirect
   jump via a temp, reti for interrupt handlers, or the eh_return
   path).  Behavior is modulated by the mep_prevent_lp_restore and
   mep_sibcall_epilogue flags.  */
void
mep_expand_epilogue (void)
{
  int i, sp_offset = 0;
  int reg_save_size = 0;
  int frame_size;
  int lp_temp = LP_REGNO, lp_slot = -1;
  int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
  int interrupt_handler = mep_interrupt_p ();

  if (profile_arc_flag == 2)
    emit_insn (gen_mep_bb_trace_ret ());

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  if (frame_pointer_needed)
    {
      /* If we have a frame pointer, we won't have a reliable stack
	 pointer (alloca, you know), so rebase SP from FP */
      emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
		      gen_rtx_REG (SImode, FP_REGNO));
      sp_offset = reg_save_size;
    }
  else
    {
      /* SP is right under our local variable space.  Adjust it if
	 needed.  */
      sp_offset = reg_save_size + frame_size;
      if (sp_offset >= 128)
	{
	  add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
	  sp_offset -= frame_size;
	}
    }

  /* This is backwards so that we restore the control and coprocessor
     registers before the temporary registers we use to restore
     them.  */
  for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
    if (mep_call_saves_register (i))
      {
	enum machine_mode rmode;
	int rss = cfun->machine->reg_save_slot[i];

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* $tp/$gp/$lp that kept their entry values were never stored.  */
	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && !(mep_reg_set_in_function (i) || interrupt_handler))
	  continue;
	if (mep_prevent_lp_restore && i == LP_REGNO)
	  continue;
	/* $10/$11 carry the EH return values; don't clobber them on
	   the normal path of an eh-capable function.  */
	if (!mep_prevent_lp_restore
	    && !interrupt_handler
	    && (i == 10 || i == 11))
	  continue;

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  emit_move_insn (gen_rtx_REG (rmode, i),
			  gen_rtx_MEM (rmode,
				       plus_constant (stack_pointer_rtx,
						      sp_offset-rss)));
	else
	  {
	    if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
	      /* Defer this one so we can jump indirect rather than
		 copying the RA to $lp and "ret".  EH epilogues
		 automatically skip this anyway.  */
	      lp_slot = sp_offset-rss;
	    else
	      {
		emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				gen_rtx_MEM (rmode,
					     plus_constant (stack_pointer_rtx,
							    sp_offset-rss)));
		emit_move_insn (gen_rtx_REG (rmode, i),
				gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
	      }
	  }
      }
  if (lp_slot != -1)
    {
      /* Restore this one last so we know it will be in the temp
	 register when we return by jumping indirectly via the temp.  */
      emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
		      gen_rtx_MEM (SImode,
				   plus_constant (stack_pointer_rtx,
						  lp_slot)));
      lp_temp = REGSAVE_CONTROL_TEMP;
    }

  add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);

  /* eh_return additionally bumps $sp by the handler-supplied amount.  */
  if (crtl->calls_eh_return && mep_prevent_lp_restore)
    emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
			   gen_rtx_REG (SImode, SP_REGNO),
			   cfun->machine->eh_stack_adjust));

  if (mep_sibcall_epilogue)
    return;

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_enable_int ());

  if (mep_prevent_lp_restore)
    {
      emit_jump_insn (gen_eh_return_internal ());
      emit_barrier ();
    }
  else if (interrupt_handler)
    emit_jump_insn (gen_mep_reti ());
  else
    emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
}
/* Expander for the eh_return pattern: force the handler address into
   $lp if it is not already there, then emit the eh_epilogue.  */
void
mep_expand_eh_return (rtx *operands)
{
  if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
    {
      rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
      emit_move_insn (ra, operands[0]);
      operands[0] = ra;
    }

  emit_insn (gen_eh_epilogue (operands[0]));
}
/* Expand the deferred eh_epilogue (run after the normal epilogue is
   emitted — see the mep.md eh_epilogue note): record the stack
   adjustment register, then expand an epilogue with the $lp restore
   suppressed so control transfers to the EH handler.  */
void
mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
{
  cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
  mep_prevent_lp_restore = 1;
  mep_expand_epilogue ();
  mep_prevent_lp_restore = 0;
}
/* Expand the epilogue for a sibling call: restores registers and $sp
   but emits no return insn (the sibcall jump follows).  */
void
mep_expand_sibcall_epilogue (void)
{
  mep_sibcall_epilogue = 1;
  mep_expand_epilogue ();
  mep_sibcall_epilogue = 0;
}
3225 static bool
3226 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3228 if (decl == NULL)
3229 return false;
3231 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3232 return false;
3234 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3235 if (mep_interrupt_p () || mep_disinterrupt_p ())
3236 return false;
3238 return true;
/* Register used to communicate the eh_return stack adjustment
   (hard register 10); presumably the EH_RETURN_STACKADJ_RTX hook —
   TODO confirm against mep.h.  */
rtx
mep_return_stackadj_rtx (void)
{
  return gen_rtx_REG (SImode, 10);
}
/* Register through which the eh_return handler address is passed:
   $lp.  */
rtx
mep_return_handler_rtx (void)
{
  return gen_rtx_REG (SImode, LP_REGNO);
}
/* Emit the mcount profiling stub: saves $0 and $lp on the stack,
   calls __mep_mcount, then restores both and pops the 8-byte slot.  */
void
mep_function_profiler (FILE *file)
{
  /* Always right at the beginning of the function.  */
  fprintf (file, "\t# mep function profiler\n");
  fprintf (file, "\tadd\t$sp, -8\n");
  fprintf (file, "\tsw\t$0, ($sp)\n");
  fprintf (file, "\tldc\t$0, $lp\n");
  fprintf (file, "\tsw\t$0, 4($sp)\n");
  fprintf (file, "\tbsr\t__mep_mcount\n");
  fprintf (file, "\tlw\t$0, 4($sp)\n");
  fprintf (file, "\tstc\t$0, $lp\n");
  fprintf (file, "\tlw\t$0, ($sp)\n");
  fprintf (file, "\tadd\t$sp, 8\n\n");
}
/* Output template worker for the mep_bb_trace_ret pattern: emits the
   same $0/$lp save/restore dance as the profiler around a call to
   __bb_trace_ret.  Returns "" as the (empty) remaining template.  */
const char *
mep_emit_bb_trace_ret (void)
{
  fprintf (asm_out_file, "\t# end of block profiling\n");
  fprintf (asm_out_file, "\tadd\t$sp, -8\n");
  fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
  fprintf (asm_out_file, "\tldc\t$0, $lp\n");
  fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
  fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
  fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
  fprintf (asm_out_file, "\tstc\t$0, $lp\n");
  fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
  fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
  return "";
}
/* End of the save/restore helpers; drop their local macros.  */
#undef SAVE
#undef RESTORE
3288 /* Operand Printing. */
/* Print ADDRESS (a MEM's address, or a bare REG from an asm operand)
   to STREAM via mep_print_operand.  */
void
mep_print_operand_address (FILE *stream, rtx address)
{
  if (GET_CODE (address) == MEM)
    address = XEXP (address, 0);
  else
    /* cf: gcc.dg/asm-4.c.  */
    gcc_assert (GET_CODE (address) == REG);

  mep_print_operand (stream, address, 0);
}
/* Table driving mep_print_operand.  CODE is the operand-letter it
   applies to (0 = any letter without a specific entry); PATTERN is
   matched against the string built by encode_pattern; FORMAT is the
   output template, where a digit N prints patternr[N], '\\' escapes
   the next character, and everything else is emitted literally.  */
static struct
{
  char code;
  const char *pattern;
  const char *format;
}
const conversions[] =
{
  { 0, "r", "0" },
  { 0, "m+ri", "3(2)" },
  { 0, "mr", "(1)" },
  { 0, "ms", "(1)" },
  { 0, "ml", "(1)" },
  { 0, "mLrs", "%lo(3)(2)" },
  { 0, "mLr+si", "%lo(4+5)(2)" },
  { 0, "m+ru2s", "%tpoff(5)(2)" },
  { 0, "m+ru3s", "%sdaoff(5)(2)" },
  { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
  { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
  { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
  { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
  { 0, "mi", "(1)" },
  { 0, "m+si", "(2+3)" },
  { 0, "m+li", "(2+3)" },
  { 0, "i", "0" },
  { 0, "s", "0" },
  { 0, "+si", "1+2" },
  { 0, "+u2si", "%tpoff(3+4)" },
  { 0, "+u3si", "%sdaoff(3+4)" },
  { 0, "l", "0" },
  { 'b', "i", "0" },
  { 'B', "i", "0" },
  { 'U', "i", "0" },
  { 'h', "i", "0" },
  { 'h', "Hs", "%hi(1)" },
  { 'I', "i", "0" },
  { 'I', "u2s", "%tpoff(2)" },
  { 'I', "u3s", "%sdaoff(2)" },
  { 'I', "+u2si", "%tpoff(3+4)" },
  { 'I', "+u3si", "%sdaoff(3+4)" },
  { 'J', "i", "0" },
  { 'P', "mr", "(1\\+),\\0" },
  { 'x', "i", "0" },
  { 0, 0, 0 }
};
/* Map a byte mask with a single set bit (0x01..0x80) or a single
   clear bit (0xfe..0xef) to a bit number; aborts on anything else.
   NOTE(review): the single-clear-bit pairings in the upper half look
   swapped — 0x7f's clear bit is 7 yet it returns 4, and 0xef's clear
   bit is 4 yet it returns 7 (likewise 0xbf/0xdf).  Verify against the
   bset/bclr operand usage before changing.  */
static int
unique_bit_in (HOST_WIDE_INT i)
{
  switch (i & 0xff)
    {
    case 0x01: case 0xfe: return 0;
    case 0x02: case 0xfd: return 1;
    case 0x04: case 0xfb: return 2;
    case 0x08: case 0xf7: return 3;
    case 0x10: case 0x7f: return 4;
    case 0x20: case 0xbf: return 5;
    case 0x40: case 0xdf: return 6;
    case 0x80: case 0xef: return 7;
    default:
      gcc_unreachable ();
    }
}
3366 static int
3367 bit_size_for_clip (HOST_WIDE_INT i)
3369 int rv;
3371 for (rv = 0; rv < 31; rv ++)
3372 if (((HOST_WIDE_INT) 1 << rv) > i)
3373 return rv + 1;
3374 gcc_unreachable ();
3377 /* Print an operand to a assembler instruction. */
/* Print an operand to a assembler instruction.  CODE is the
   punct/letter modifier from the template ('<', 'L' and 'M' are
   handled specially; everything else is driven by the conversions[]
   table after encode_pattern classifies X).  */
void
mep_print_operand (FILE *file, rtx x, int code)
{
  int i, j;
  const char *real_name;

  if (code == '<')
    {
      /* Print a mnemonic to do CR <- CR moves.  Find out which intrinsic
	 we're using, then skip over the "mep_" part of its name.  */
      const struct cgen_insn *insn;

      if (mep_get_move_insn (mep_cmov, &insn))
	fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
      else
	mep_intrinsic_unavailable (mep_cmov);
      return;
    }
  if (code == 'L')
    {
      /* Bit-op suffix selected by the logical operation's rtx code.  */
      switch (GET_CODE (x))
	{
	case AND:
	  fputs ("clr", file);
	  return;
	case IOR:
	  fputs ("set", file);
	  return;
	case XOR:
	  fputs ("not", file);
	  return;
	default:
	  output_operand_lossage ("invalid %%L code");
	}
    }
  if (code == 'M')
    {
      /* Print the second operand of a CR <- CR move.  If we're using
	 a two-operand instruction (i.e., a real cmov), then just print
	 the operand normally.  If we're using a "reg, reg, immediate"
	 instruction such as caddi3, print the operand followed by a
	 zero field.  If we're using a three-register instruction,
	 print the operand twice.  */
      const struct cgen_insn *insn;

      mep_print_operand (file, x, 0);
      if (mep_get_move_insn (mep_cmov, &insn)
	  && insn_data[insn->icode].n_operands == 3)
	{
	  fputs (", ", file);
	  if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
	    mep_print_operand (file, x, 0);
	  else
	    mep_print_operand (file, const0_rtx, 0);
	}
      return;
    }

  /* Classify X into the global `pattern' string / patternr[] array,
     then find the matching conversions[] row and expand its format.  */
  encode_pattern (x);
  for (i = 0; conversions[i].pattern; i++)
    if (conversions[i].code == code
	&& strcmp(conversions[i].pattern, pattern) == 0)
      {
	for (j = 0; conversions[i].format[j]; j++)
	  if (conversions[i].format[j] == '\\')
	    {
	      /* Escaped character: emit the next byte literally.  */
	      fputc (conversions[i].format[j+1], file);
	      j++;
	    }
	  else if (ISDIGIT(conversions[i].format[j]))
	    {
	      /* A digit indexes one of the sub-rtxes recorded by
		 encode_pattern.  */
	      rtx r = patternr[conversions[i].format[j] - '0'];
	      switch (GET_CODE (r))
		{
		case REG:
		  fprintf (file, "%s", reg_names [REGNO (r)]);
		  break;
		case CONST_INT:
		  switch (code)
		    {
		    case 'b':
		      fprintf (file, "%d", unique_bit_in (INTVAL (r)));
		      break;
		    case 'B':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
		      break;
		    case 'h':
		      fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
		      break;
		    case 'U':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
		      break;
		    case 'J':
		      fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
		      break;
		    case 'x':
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && !(INTVAL (r) & 0xff))
			fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    case 'I':
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && conversions[i].format[j+1] == 0)
			{
			  fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
			  fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
			}
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    default:
		      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    }
		  break;
		case CONST_DOUBLE:
		  fprintf(file, "[const_double 0x%lx]",
			  (unsigned long) CONST_DOUBLE_HIGH(r));
		  break;
		case SYMBOL_REF:
		  real_name = TARGET_STRIP_NAME_ENCODING (XSTR (r, 0));
		  assemble_name (file, real_name);
		  break;
		case LABEL_REF:
		  output_asm_label (r);
		  break;
		default:
		  fprintf (stderr, "don't know how to print this operand:");
		  debug_rtx (r);
		  gcc_unreachable ();
		}
	    }
	  else
	    {
	      /* Literal format character; suppress a '+' before a
		 negative constant so "a+-4" prints as "a-4".  */
	      if (conversions[i].format[j] == '+'
		  && (!code || code == 'I')
		  && ISDIGIT (conversions[i].format[j+1])
		  && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
		  && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
		continue;
	      fputc(conversions[i].format[j], file);
	    }
	break;
      }
  if (!conversions[i].pattern)
    {
      error ("unconvertible operand %c %qs", code?code:'-', pattern);
      debug_rtx(x);
    }

  return;
}
/* Called by final before each insn is output; prefixes '+' to force
   core-slot issue on bundled jumps.  */
void
mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
			int noperands ATTRIBUTE_UNUSED)
{
  /* Despite the fact that MeP is perfectly capable of branching and
     doing something else in the same bundle, gcc does jump
     optimization *after* scheduling, so we cannot trust the bundling
     flags on jump instructions.  */
  if (GET_MODE (insn) == BImode
      && get_attr_slots (insn) != SLOTS_CORE)
    fputc ('+', asm_out_file);
}
3547 /* Function args in registers. */
/* Implement TARGET_SETUP_INCOMING_VARARGS: record how many of the
   four argument registers still need spilling for a varargs function
   and report the pretend-args size.
   NOTE(review): when cum->nregs + 1 > 4, nsave is negative and
   *pretend_size goes negative while arg_regs_to_save is left alone —
   confirm callers guarantee nsave >= 0 or clamp it.  */
static void
mep_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
			    enum machine_mode mode ATTRIBUTE_UNUSED,
			    tree type ATTRIBUTE_UNUSED, int *pretend_size,
			    int second_time ATTRIBUTE_UNUSED)
{
  int nsave = 4 - (cum->nregs + 1);

  if (nsave > 0)
    cfun->machine->arg_regs_to_save = nsave;
  *pretend_size = nsave * 4;
}
3562 static int
3563 bytesize (const_tree type, enum machine_mode mode)
3565 if (mode == BLKmode)
3566 return int_size_in_bytes (type);
3567 return GET_MODE_SIZE (mode);
/* Implement EXPAND_BUILTIN_SAVEREGS: spill the unnamed argument
   registers to a stack buffer and return its address.  On IVC2 the
   buffer also holds the 64-bit coprocessor argument registers after
   the core words.  */
static rtx
mep_expand_builtin_saveregs (void)
{
  int bufsize, i, ns;
  rtx regbuf;

  ns = cfun->machine->arg_regs_to_save;
  if (TARGET_IVC2)
    {
      /* Core words (8-byte aligned) followed by ns 8-byte cop regs.  */
      bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
      regbuf = assign_stack_local (SImode, bufsize, 64);
    }
  else
    {
      bufsize = ns * 4;
      regbuf = assign_stack_local (SImode, bufsize, 32);
    }

  move_block_from_reg (5-ns, regbuf, ns);

  if (TARGET_IVC2)
    {
      rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
      int ofs = 8 * ((ns+1)/2);

      for (i=0; i<ns; i++)
	{
	  /* Registers 49.. hold the coprocessor argument values.  */
	  int rn = (4-ns) + i + 49;
	  rtx ptr;

	  ptr = offset_address (tmp, GEN_INT (ofs), 2);
	  emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
	  ofs += 8;
	}
    }
  return XEXP (regbuf, 0);
}
/* True when tree T is a vector type.  */
#define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
/* Implement TARGET_BUILD_BUILTIN_VA_LIST: a four-pointer record
   tracking the next GP-register slot, the GP-slot limit, the next
   coprocessor slot, and the next stack argument.  */
static tree
mep_build_builtin_va_list (void)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree record;

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);

  f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
			  get_identifier ("__va_next_gp"), ptr_type_node);
  f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
				get_identifier ("__va_next_gp_limit"),
				ptr_type_node);
  f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
			   ptr_type_node);
  f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
			     ptr_type_node);

  DECL_FIELD_CONTEXT (f_next_gp) = record;
  DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
  DECL_FIELD_CONTEXT (f_next_cop) = record;
  DECL_FIELD_CONTEXT (f_next_stack) = record;

  /* Chain the fields in declaration order.  */
  TYPE_FIELDS (record) = f_next_gp;
  TREE_CHAIN (f_next_gp) = f_next_gp_limit;
  TREE_CHAIN (f_next_gp_limit) = f_next_cop;
  TREE_CHAIN (f_next_cop) = f_next_stack;

  layout_type (record);

  return record;
}
/* Implement EXPAND_BUILTIN_VA_START.  Initialize the four va_list
   fields.  NS is the count of argument registers that
   mep_expand_builtin_saveregs dumps; the offsets computed here must
   match the buffer layout it creates.  */
static void
mep_expand_va_start (tree valist, rtx nextarg)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree t, u;
  int ns;

  ns = cfun->machine->arg_regs_to_save;

  /* Field decls, in the order mep_build_builtin_va_list chained them.  */
  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = TREE_CHAIN (f_next_gp);
  f_next_cop = TREE_CHAIN (f_next_gp_limit);
  f_next_stack = TREE_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* va_list.next_gp = expand_builtin_saveregs (); */
  u = make_tree (sizetype, expand_builtin_saveregs ());
  u = fold_convert (ptr_type_node, u);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
  u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
		   size_int (4 * ns));
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* The coprocessor area starts after the core area rounded up to 8
     bytes — the same 8 * ((ns+1)/2) used by the saveregs expander.  */
  u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
		   size_int (8 * ((ns+1)/2)));
  /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
  t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_stack = nextarg; */
  u = make_tree (ptr_type_node, nextarg);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Emit gimple that picks the
   next argument either from the saved register area (core or, for
   IVC2 vectors, coprocessor) or from the stack, returning the
   dereferenced value.  Values larger than a register (or of unknown
   size) were passed by reference, so an extra indirection is added.  */
static tree
mep_gimplify_va_arg_expr (tree valist, tree type,
			  tree *pre_p, tree *post_p ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size, rsize;
  bool by_reference, ivc2_vec;
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree label_sover, label_selse;
  tree tmp, res_addr;

  ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);

  size = int_size_in_bytes (type);
  /* IVC2 vectors fit in 8 bytes; everything else in 4 — mirrors
     mep_pass_by_reference.  */
  by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);

  if (by_reference)
    {
      type = build_pointer_type (type);
      size = 4;
    }
  /* Stack slot consumed, rounded up to a word.  */
  rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;

  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = TREE_CHAIN (f_next_gp);
  f_next_cop = TREE_CHAIN (f_next_gp_limit);
  f_next_stack = TREE_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* if f_next_gp < f_next_gp_limit
       IF (VECTOR_P && IVC2)
         val = *f_next_cop;
       ELSE
         val = *f_next_gp;
       f_next_gp += 4;
       f_next_cop += 8;
     else
       label_selse:
       val = *f_next_stack;
       f_next_stack += rsize;
       label_sover:
  */

  label_sover = create_artificial_label (UNKNOWN_LOCATION);
  label_selse = create_artificial_label (UNKNOWN_LOCATION);
  res_addr = create_tmp_var (ptr_type_node, NULL);

  /* Registers exhausted?  Jump to the stack path.  */
  tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
		unshare_expr (next_gp_limit));
  tmp = build3 (COND_EXPR, void_type_node, tmp,
		build1 (GOTO_EXPR, void_type_node,
			unshare_expr (label_selse)),
		NULL_TREE);
  gimplify_and_add (tmp, pre_p);

  if (ivc2_vec)
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
      gimplify_and_add (tmp, pre_p);
    }
  else
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
      gimplify_and_add (tmp, pre_p);
    }

  /* Both cursors advance together so core and cop areas stay paired.  */
  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
		unshare_expr (next_gp), size_int (4));
  gimplify_assign (unshare_expr (next_gp), tmp, pre_p);

  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
		unshare_expr (next_cop), size_int (8));
  gimplify_assign (unshare_expr (next_cop), tmp, pre_p);

  tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  /* - - stack path - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
		unshare_expr (next_stack), size_int (rsize));
  gimplify_assign (unshare_expr (next_stack), tmp, pre_p);

  /* - - join - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  res_addr = fold_convert (build_pointer_type (type), res_addr);

  /* By-reference arguments need a second dereference: the saved slot
     holds a pointer to the actual value.  */
  if (by_reference)
    res_addr = build_va_arg_indirect_ref (res_addr);

  return build_va_arg_indirect_ref (res_addr);
}
3806 void
3807 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3808 rtx libname ATTRIBUTE_UNUSED,
3809 tree fndecl ATTRIBUTE_UNUSED)
3811 pcum->nregs = 0;
3813 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3814 pcum->vliw = 1;
3815 else
3816 pcum->vliw = 0;
3820 mep_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
3821 tree type ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
3823 /* VOIDmode is a signal for the backend to pass data to the call
3824 expander via the second operand to the call pattern. We use
3825 this to determine whether to use "jsr" or "jsrv". */
3826 if (mode == VOIDmode)
3827 return GEN_INT (cum.vliw);
3829 /* If we havn't run out of argument registers, return the next. */
3830 if (cum.nregs < 4)
3832 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3833 return gen_rtx_REG (mode, cum.nregs + 49);
3834 else
3835 return gen_rtx_REG (mode, cum.nregs + 1);
3838 /* Otherwise the argument goes on the stack. */
3839 return NULL_RTX;
3842 static bool
3843 mep_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
3844 enum machine_mode mode,
3845 const_tree type,
3846 bool named ATTRIBUTE_UNUSED)
3848 int size = bytesize (type, mode);
3850 /* This is non-obvious, but yes, large values passed after we've run
3851 out of registers are *still* passed by reference - we put the
3852 address of the parameter on the stack, as well as putting the
3853 parameter itself elsewhere on the stack. */
3855 if (size <= 0 || size > 8)
3856 return true;
3857 if (size <= 4)
3858 return false;
3859 if (TARGET_IVC2 && cum->nregs < 4 && type != NULL_TREE && VECTOR_TYPE_P (type))
3860 return false;
3861 return true;
3864 void
3865 mep_arg_advance (CUMULATIVE_ARGS *pcum,
3866 enum machine_mode mode ATTRIBUTE_UNUSED,
3867 tree type ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
3869 pcum->nregs += 1;
3872 bool
3873 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3875 int size = bytesize (type, BLKmode);
3876 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3877 return size > 0 && size <= 8 ? 0 : 1;
3878 return size > 0 && size <= 4 ? 0 : 1;
3881 static bool
3882 mep_narrow_volatile_bitfield (void)
3884 return true;
3885 return false;
3888 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3891 mep_function_value (tree type, tree func ATTRIBUTE_UNUSED)
3893 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3894 return gen_rtx_REG (TYPE_MODE (type), 48);
3895 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3898 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3901 mep_libcall_value (enum machine_mode mode)
3903 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
/* Handle pipeline hazards.  */

/* Opcode classes involved in the stc/fsft and stc/ret hazards tracked
   by mep_asm_output_opcode; values index opnames[].  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* Class of the most recently emitted opcode (op_none == 0 initially).  */
static int prev_opcode = 0;
/* This isn't as optimal as it could be, because we don't know what
   control register the STC opcode is storing in.  We only need to add
   the nop if it's the relevant register, but we add it for irrelevant
   registers also.  */
3918 void
3919 mep_asm_output_opcode (FILE *file, const char *ptr)
3921 int this_opcode = op_none;
3922 const char *hazard = 0;
3924 switch (*ptr)
3926 case 'f':
3927 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3928 this_opcode = op_fsft;
3929 break;
3930 case 'r':
3931 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3932 this_opcode = op_ret;
3933 break;
3934 case 's':
3935 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3936 this_opcode = op_stc;
3937 break;
3940 if (prev_opcode == op_stc && this_opcode == op_fsft)
3941 hazard = "nop";
3942 if (prev_opcode == op_stc && this_opcode == op_ret)
3943 hazard = "nop";
3945 if (hazard)
3946 fprintf(file, "%s\t# %s-%s hazard\n\t",
3947 hazard, opnames[prev_opcode], opnames[this_opcode]);
3949 prev_opcode = this_opcode;
3952 /* Handle attributes. */
3954 static tree
3955 mep_validate_based_tiny (tree *node, tree name, tree args,
3956 int flags ATTRIBUTE_UNUSED, bool *no_add)
3958 if (TREE_CODE (*node) != VAR_DECL
3959 && TREE_CODE (*node) != POINTER_TYPE
3960 && TREE_CODE (*node) != TYPE_DECL)
3962 warning (0, "%qE attribute only applies to variables", name);
3963 *no_add = true;
3965 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3967 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3969 warning (0, "address region attributes not allowed with auto storage class");
3970 *no_add = true;
3972 /* Ignore storage attribute of pointed to variable: char __far * x; */
3973 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3975 warning (0, "address region attributes on pointed-to types ignored");
3976 *no_add = true;
3980 return NULL_TREE;
3983 static int
3984 mep_multiple_address_regions (tree list, bool check_section_attr)
3986 tree a;
3987 int count_sections = 0;
3988 int section_attr_count = 0;
3990 for (a = list; a; a = TREE_CHAIN (a))
3992 if (is_attribute_p ("based", TREE_PURPOSE (a))
3993 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3994 || is_attribute_p ("near", TREE_PURPOSE (a))
3995 || is_attribute_p ("far", TREE_PURPOSE (a))
3996 || is_attribute_p ("io", TREE_PURPOSE (a)))
3997 count_sections ++;
3998 if (check_section_attr)
3999 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
4002 if (check_section_attr)
4003 return section_attr_count;
4004 else
4005 return count_sections;
/* Fetch the attribute list for DECL: a type's TYPE_ATTRIBUTES; a
   decl's DECL_ATTRIBUTES if any, otherwise the attributes of its
   type.  Fix: the expansion is now fully parenthesized so the
   conditional operator cannot bind to operators at the use site.  */
#define MEP_ATTRIBUTES(decl)			\
  ((TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl)	\
   : DECL_ATTRIBUTES (decl)			\
   ? (DECL_ATTRIBUTES (decl))			\
   : TYPE_ATTRIBUTES (TREE_TYPE (decl)))
4014 static tree
4015 mep_validate_near_far (tree *node, tree name, tree args,
4016 int flags ATTRIBUTE_UNUSED, bool *no_add)
4018 if (TREE_CODE (*node) != VAR_DECL
4019 && TREE_CODE (*node) != FUNCTION_DECL
4020 && TREE_CODE (*node) != METHOD_TYPE
4021 && TREE_CODE (*node) != POINTER_TYPE
4022 && TREE_CODE (*node) != TYPE_DECL)
4024 warning (0, "%qE attribute only applies to variables and functions",
4025 name);
4026 *no_add = true;
4028 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
4030 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
4032 warning (0, "address region attributes not allowed with auto storage class");
4033 *no_add = true;
4035 /* Ignore storage attribute of pointed to variable: char __far * x; */
4036 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
4038 warning (0, "address region attributes on pointed-to types ignored");
4039 *no_add = true;
4042 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
4044 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4045 name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
4046 DECL_ATTRIBUTES (*node) = NULL_TREE;
4048 return NULL_TREE;
4051 static tree
4052 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4053 int flags ATTRIBUTE_UNUSED, bool *no_add)
4055 if (TREE_CODE (*node) != FUNCTION_DECL
4056 && TREE_CODE (*node) != METHOD_TYPE)
4058 warning (0, "%qE attribute only applies to functions", name);
4059 *no_add = true;
4061 return NULL_TREE;
4064 static tree
4065 mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4066 int flags ATTRIBUTE_UNUSED, bool *no_add)
4068 tree function_type;
4070 if (TREE_CODE (*node) != FUNCTION_DECL)
4072 warning (0, "%qE attribute only applies to functions", name);
4073 *no_add = true;
4074 return NULL_TREE;
4077 if (DECL_DECLARED_INLINE_P (*node))
4078 error ("cannot inline interrupt function %qE", DECL_NAME (*node));
4079 DECL_UNINLINABLE (*node) = 1;
4081 function_type = TREE_TYPE (*node);
4083 if (TREE_TYPE (function_type) != void_type_node)
4084 error ("interrupt function must have return type of void");
4086 if (TYPE_ARG_TYPES (function_type)
4087 && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
4088 || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
4089 error ("interrupt function must have no arguments");
4091 return NULL_TREE;
/* Attribute handler for "io" and "cb".  Valid only on variables; the
   optional argument, if present, must be an integer constant address.
   Accepted variables are made volatile unless -mio-no-volatile.  */
static tree
mep_validate_io_cb (tree *node, tree name, tree args,
		    int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }

  if (args != NULL_TREE)
    {
      /* Peel a NON_LVALUE_EXPR wrapper (modifying the argument list in
	 place) so the constant underneath can be checked.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
	TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
	{
	  warning (0, "%qE attribute allows only an integer constant argument",
		   name);
	  *no_add = true;
	}
    }

  /* Only mark the variable volatile when the attribute was accepted.  */
  if (*no_add == false && !TARGET_IO_NO_VOLATILE)
    TREE_THIS_VOLATILE (*node) = 1;

  return NULL_TREE;
}
/* Attribute handler for "vliw".  Valid on function types/decls (and
   the typedef/field forms).  For invalid targets, emit a tailored
   warning plus — once per compilation, via the static note flags — a
   usage hint for the pointer and array cases.  Always errors if the
   selected MeP configuration has no VLIW support.  */
static tree
mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
		   int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      static int gave_pointer_note = 0;
      static int gave_array_note = 0;
      static const char * given_type = NULL;

      /* Describe what the attribute was (wrongly) applied to.  */
      given_type = tree_code_name[TREE_CODE (*node)];
      if (TREE_CODE (*node) == POINTER_TYPE)
	given_type = "pointers";
      if (TREE_CODE (*node) == ARRAY_TYPE)
	given_type = "arrays";

      if (given_type)
	warning (0, "%qE attribute only applies to functions, not %s",
		 name, given_type);
      else
	warning (0, "%qE attribute only applies to functions",
		 name);
      *no_add = true;

      if (TREE_CODE (*node) == POINTER_TYPE
	  && !gave_pointer_note)
	{
	  inform (input_location, "To describe a pointer to a VLIW function, use syntax like this:");
	  inform (input_location, " typedef int (__vliw *vfuncptr) ();");
	  gave_pointer_note = 1;
	}

      if (TREE_CODE (*node) == ARRAY_TYPE
	  && !gave_array_note)
	{
	  inform (input_location, "To describe an array of VLIW function pointers, use syntax like this:");
	  inform (input_location, " typedef int (__vliw *vfuncptr[]) ();");
	  gave_array_note = 1;
	}
    }

  if (!TARGET_VLIW)
    error ("VLIW functions are not allowed without a VLIW configuration");
  return NULL_TREE;
}
4171 static const struct attribute_spec mep_attribute_table[11] =
4173 /* name min max decl type func handler */
4174 { "based", 0, 0, false, false, false, mep_validate_based_tiny },
4175 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny },
4176 { "near", 0, 0, false, false, false, mep_validate_near_far },
4177 { "far", 0, 0, false, false, false, mep_validate_near_far },
4178 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt },
4179 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt },
4180 { "io", 0, 1, false, false, false, mep_validate_io_cb },
4181 { "cb", 0, 1, false, false, false, mep_validate_io_cb },
4182 { "vliw", 0, 0, false, true, false, mep_validate_vliw },
4183 { NULL, 0, 0, false, false, false, NULL }
4186 static bool
4187 mep_function_attribute_inlinable_p (const_tree callee)
4189 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4190 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4191 return (lookup_attribute ("disinterrupt", attrs) == 0
4192 && lookup_attribute ("interrupt", attrs) == 0);
4195 static bool
4196 mep_can_inline_p (tree caller, tree callee)
4198 if (TREE_CODE (callee) == ADDR_EXPR)
4199 callee = TREE_OPERAND (callee, 0);
4201 if (!mep_vliw_function_p (caller)
4202 && mep_vliw_function_p (callee))
4204 return false;
4206 return true;
/* Bits recorded per function name by the "#pragma call" and
   "#pragma disinterrupt" handlers below.  */
#define FUNC_CALL 1
#define FUNC_DISINTERRUPT 2


/* One entry per function name mentioned in a pragma: FLAG holds the
   FUNC_* bits requested, USED the bits actually consumed by lookups
   (so unused pragmas can be diagnosed at end of file).  */
struct GTY(()) pragma_entry {
  int used;
  int flag;
  const char *funcname;
};
typedef struct pragma_entry pragma_entry;

/* Hash table of pragma-tagged function names, keyed by funcname.  */
static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4223 static int
4224 pragma_entry_eq (const void *p1, const void *p2)
4226 const pragma_entry *old = (const pragma_entry *) p1;
4227 const char *new_name = (const char *) p2;
4229 return strcmp (old->funcname, new_name) == 0;
/* htab hash callback: hash an entry by its function name, matching
   the htab_hash_string calls used for lookups.  */
static hashval_t
pragma_entry_hash (const void *p)
{
  const pragma_entry *old = (const pragma_entry *) p;
  return htab_hash_string (old->funcname);
}
4239 static void
4240 mep_note_pragma_flag (const char *funcname, int flag)
4242 pragma_entry **slot;
4244 if (!pragma_htab)
4245 pragma_htab = htab_create_ggc (31, pragma_entry_hash,
4246 pragma_entry_eq, NULL);
4248 slot = (pragma_entry **)
4249 htab_find_slot_with_hash (pragma_htab, funcname,
4250 htab_hash_string (funcname), INSERT);
4252 if (!*slot)
4254 *slot = GGC_NEW (pragma_entry);
4255 (*slot)->flag = 0;
4256 (*slot)->used = 0;
4257 (*slot)->funcname = ggc_strdup (funcname);
4259 (*slot)->flag |= flag;
4262 static bool
4263 mep_lookup_pragma_flag (const char *funcname, int flag)
4265 pragma_entry **slot;
4267 if (!pragma_htab)
4268 return false;
4270 if (funcname[0] == '@' && funcname[2] == '.')
4271 funcname += 3;
4273 slot = (pragma_entry **)
4274 htab_find_slot_with_hash (pragma_htab, funcname,
4275 htab_hash_string (funcname), NO_INSERT);
4276 if (slot && *slot && ((*slot)->flag & flag))
4278 (*slot)->used |= flag;
4279 return true;
4281 return false;
/* True if FUNCNAME was named in a "#pragma call"; marks it used.  */
bool
mep_lookup_pragma_call (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_CALL);
}
/* Record FUNCNAME as named in a "#pragma call".  */
void
mep_note_pragma_call (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_CALL);
}
/* True if FUNCNAME was named in a "#pragma disinterrupt"; marks it
   used.  */
bool
mep_lookup_pragma_disinterrupt (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
}
/* Record FUNCNAME as named in a "#pragma disinterrupt".  */
void
mep_note_pragma_disinterrupt (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
}
4308 static int
4309 note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
4311 const pragma_entry *d = (const pragma_entry *)(*slot);
4313 if ((d->flag & FUNC_DISINTERRUPT)
4314 && !(d->used & FUNC_DISINTERRUPT))
4315 warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
4316 return 1;
/* End-of-file hook: diagnose any disinterrupt pragmas that never
   matched a function.  */
void
mep_file_cleanups (void)
{
  if (pragma_htab)
    htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
}
/* Map attribute LIST to the single-character section encoding used in
   "@x." symbol prefixes: 'b' based, 't' tiny, 'n' near, 'f' far,
   'i' io with a valid constant address, 'I' other io, 'c' cb; 0 when
   no region attribute applies.  Duplicate region attributes are
   collapsed (with a warning) by truncating the list in place.  */
static int
mep_attrlist_to_encoding (tree list, tree decl)
{
  if (mep_multiple_address_regions (list, false) > 1)
    {
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       TREE_PURPOSE (TREE_CHAIN (list)),
	       DECL_NAME (decl),
	       DECL_SOURCE_LINE (decl));
      TREE_CHAIN (list) = NULL_TREE;
    }

  while (list)
    {
      if (is_attribute_p ("based", TREE_PURPOSE (list)))
	return 'b';
      if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
	return 't';
      if (is_attribute_p ("near", TREE_PURPOSE (list)))
	return 'n';
      if (is_attribute_p ("far", TREE_PURPOSE (list)))
	return 'f';
      if (is_attribute_p ("io", TREE_PURPOSE (list)))
	{
	  /* 'i' only for a constant address within [0, 0x1000000].  */
	  if (TREE_VALUE (list)
	      && TREE_VALUE (TREE_VALUE (list))
	      && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
	      if (location >= 0
		  && location <= 0x1000000)
		return 'i';
	    }
	  return 'I';
	}
      if (is_attribute_p ("cb", TREE_PURPOSE (list)))
	return 'c';
      list = TREE_CHAIN (list);
    }

  /* With -mtf, functions without an explicit section default to far.  */
  if (TARGET_TF
      && TREE_CODE (decl) == FUNCTION_DECL
      && DECL_SECTION_NAME (decl) == 0)
    return 'f';
  return 0;
}
4373 static int
4374 mep_comp_type_attributes (const_tree t1, const_tree t2)
4376 int vliw1, vliw2;
4378 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4379 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4381 if (vliw1 != vliw2)
4382 return 0;
4384 return 1;
/* Implement TARGET_INSERT_ATTRIBUTES.  Attach a "disinterrupt"
   attribute to functions named in a matching pragma, then apply the
   default section-placement rules to global/static variables: honor
   an explicit region attribute (diagnosing duplicate __io addresses),
   otherwise choose based/tiny/far from the variable's size (or, with
   -mrand-tpgp, pseudo-randomly from a hash of its name) and chain the
   chosen attribute onto the decl.  */
static void
mep_insert_attributes (tree decl, tree *attributes)
{
  int size;
  const char *secname = 0;
  tree attrib, attrlist;
  char encoding;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));

      if (mep_lookup_pragma_disinterrupt (funcname))
	{
	  attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
	  *attributes = chainon (*attributes, attrib);
	}
    }

  /* Only global/static/external variables get default placement.  */
  if (TREE_CODE (decl) != VAR_DECL
      || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
    return;

  if (TREE_READONLY (decl) && TARGET_DC)
    /* -mdc means that const variables default to the near section,
       regardless of the size cutoff.  */
    return;

  /* User specified an attribute, so override the default.
     Ignore storage attribute of pointed to variable. char __far * x;  */
  if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
    {
      if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
	TYPE_ATTRIBUTES (decl) = NULL_TREE;
      else if (DECL_ATTRIBUTES (decl) && *attributes)
	DECL_ATTRIBUTES (decl) = NULL_TREE;
    }

  /* Look for a region encoding on the decl first, then on its type.  */
  attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
  encoding = mep_attrlist_to_encoding (attrlist, decl);
  if (!encoding && TYPE_P (TREE_TYPE (decl)))
    {
      attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
      encoding = mep_attrlist_to_encoding (attrlist, decl);
    }

  if (encoding)
    {
      /* This means that the declaration has a specific section
	 attribute, so we should not apply the default rules.  */

      if (encoding == 'i' || encoding == 'I')
	{
	  tree attr = lookup_attribute ("io", attrlist);
	  if (attr
	      && TREE_VALUE (attr)
	      && TREE_VALUE (TREE_VALUE(attr)))
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
	      static tree previous_value = 0;
	      static int previous_location = 0;
	      static tree previous_name = 0;

	      /* We take advantage of the fact that gcc will reuse the
		 same tree pointer when applying an attribute to a
		 list of decls, but produce a new tree for attributes
		 on separate source lines, even when they're textually
		 identical.  This is the behavior we want.  */
	      if (TREE_VALUE (attr) == previous_value
		  && location == previous_location)
		{
		  warning(0, "__io address 0x%x is the same for %qE and %qE",
			  location, previous_name, DECL_NAME (decl));
		}
	      previous_name = DECL_NAME (decl);
	      previous_location = location;
	      previous_value = TREE_VALUE (attr);
	    }
	}
      return;
    }

  /* Declarations of arrays can change size.  Don't trust them.  */
  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
    size = 0;
  else
    size = int_size_in_bytes (TREE_TYPE (decl));

  if (TARGET_RAND_TPGP && size <= 4 && size > 0)
    {
      /* Scatter small variables across sections pseudo-randomly, keyed
	 by a simple byte sum of the name so the choice is stable.  */
      if (TREE_PUBLIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_STATIC (decl))
	{
	  const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
	  int key = 0;

	  while (*name)
	    key += *name++;

	  switch (key & 3)
	    {
	    case 0:
	      secname = "based";
	      break;
	    case 1:
	      secname = "tiny";
	      break;
	    case 2:
	      secname = "far";
	      break;
	    default:
	      break;
	    }
	}
    }
  else
    {
      if (size <= mep_based_cutoff && size > 0)
	secname = "based";
      else if (size <= mep_tiny_cutoff && size > 0)
	secname = "tiny";
      else if (TARGET_L)
	secname = "far";
    }

  /* -mconst-section= overrides the size-based choice for const data.  */
  if (mep_const_section && TREE_READONLY (decl))
    {
      if (strcmp (mep_const_section, "tiny") == 0)
	secname = "tiny";
      else if (strcmp (mep_const_section, "near") == 0)
	return;
      else if (strcmp (mep_const_section, "far") == 0)
	secname = "far";
    }

  if (!secname)
    return;

  if (!mep_multiple_address_regions (*attributes, true)
      && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
    {
      attrib = build_tree_list (get_identifier (secname), NULL_TREE);

      /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
	 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
	 and mep_validate_based_tiny.  */
      DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
    }
}
/* Implement TARGET_ENCODE_SECTION_INFO.  On the first call for a
   variable or function with a region attribute, rewrite its symbol to
   carry an "@x." prefix encoding the region, and warn when the object
   is too large for the based/tiny/near address range.  */
static void
mep_encode_section_info (tree decl, rtx rtl, int first)
{
  rtx rtlname;
  const char *oldname;
  const char *secname;
  char encoding;
  char *newname;
  tree idp;
  int maxsize;
  tree type;
  tree mep_attributes;

  if (! first)
    return;

  if (TREE_CODE (decl) != VAR_DECL
      && TREE_CODE (decl) != FUNCTION_DECL)
    return;

  /* The symbol may sit directly in the rtl or behind a MEM.  */
  rtlname = XEXP (rtl, 0);
  if (GET_CODE (rtlname) == SYMBOL_REF)
    oldname = XSTR (rtlname, 0);
  else if (GET_CODE (rtlname) == MEM
	   && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
    oldname = XSTR (XEXP (rtlname, 0), 0);
  else
    gcc_unreachable ();

  type = TREE_TYPE (decl);
  if (type == error_mark_node)
    return;
  mep_attributes = MEP_ATTRIBUTES (decl);

  encoding = mep_attrlist_to_encoding (mep_attributes, decl);

  if (encoding)
    {
      /* Build "@x.name" (prefix is 3 chars + NUL).  */
      newname = (char *) alloca (strlen (oldname) + 4);
      sprintf (newname, "@%c.%s", encoding, oldname);
      idp = get_identifier (newname);
      XEXP (rtl, 0) =
	gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
      SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
      SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);

      /* Capacity limits of the short-addressing regions.  */
      switch (encoding)
	{
	case 'b':
	  maxsize = 128;
	  secname = "based";
	  break;
	case 't':
	  maxsize = 65536;
	  secname = "tiny";
	  break;
	case 'n':
	  maxsize = 0x1000000;
	  secname = "near";
	  break;
	default:
	  maxsize = 0;
	  secname = 0;
	  break;
	}
      if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
	{
	  warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
		   oldname,
		   (long) int_size_in_bytes (TREE_TYPE (decl)),
		   secname,
		   maxsize);
	}
    }
}
/* Implement TARGET_STRIP_NAME_ENCODING.  Skip any leading '*' markers
   and "@x." region-encoding prefixes and return the bare name.  */
const char *
mep_strip_name_encoding (const char *sym)
{
  for (;;)
    {
      if (sym[0] == '*')
	sym += 1;
      else if (sym[0] == '@' && sym[2] == '.')
	sym += 3;
      else
	return sym;
    }
}
/* Implement TARGET_ASM_SELECT_SECTION.  Choose a section for DECL
   based on its "@x." symbol prefix: functions go to the (possibly
   far/vliw) text sections, variables to based/tiny/far data or rodata
   sections; io/cb variables must be uninitialized and fall back to
   .data after an error.  */
static section *
mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
		    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  int readonly = 1;
  int encoding;

  /* Decide whether DECL can live in a read-only section.  */
  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (!TREE_READONLY (decl)
	  || TREE_SIDE_EFFECTS (decl)
	  || !DECL_INITIAL (decl)
	  || (DECL_INITIAL (decl) != error_mark_node
	      && !TREE_CONSTANT (DECL_INITIAL (decl))))
	readonly = 0;
      break;
    case CONSTRUCTOR:
      if (! TREE_CONSTANT (decl))
	readonly = 0;
      break;

    default:
      break;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	encoding = name[1];
      else
	encoding = 0;

      /* NOTE(review): after mep_unique_section this falls through to
	 the readonly/data return at the bottom rather than returning a
	 text section — presumably the unique section name takes
	 precedence; confirm against the section machinery.  */
      if (flag_function_sections || DECL_ONE_ONLY (decl))
	mep_unique_section (decl, 0);
      else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	{
	  if (encoding == 'f')
	    return vftext_section;
	  else
	    return vtext_section;
	}
      else if (encoding == 'f')
	return ftext_section;
      else
	return text_section;
    }

  if (TREE_CODE (decl) == VAR_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	switch (name[1])
	  {
	  case 'b':
	    return based_section;

	  case 't':
	    if (readonly)
	      return srodata_section;
	    if (DECL_INITIAL (decl))
	      return sdata_section;
	    return tinybss_section;

	  case 'f':
	    if (readonly)
	      return frodata_section;
	    return far_section;

	  case 'i':
	  case 'I':
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<io%> must be uninitialized", decl);
	    return data_section;

	  case 'c':
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<cb%> must be uninitialized", decl);
	    return data_section;
	  }
    }

  if (readonly)
    return readonly_data_section;

  return data_section;
}
/* Implement TARGET_ASM_UNIQUE_SECTION.  Build a per-decl section name
   of the form PREFIX + symbol, where PREFIX is selected from the table
   below by the decl's kind, readonly-ness, and "@x." region prefix
   (the linkonce variant is used for DECL_ONE_ONLY decls).  */
static void
mep_unique_section (tree decl, int reloc)
{
  static const char *prefixes[][2] =
  {
    { ".text.",    ".gnu.linkonce.t." },
    { ".rodata.",  ".gnu.linkonce.r." },
    { ".data.",    ".gnu.linkonce.d." },
    { ".based.",   ".gnu.linkonce.based." },
    { ".sdata.",   ".gnu.linkonce.s." },
    { ".far.",     ".gnu.linkonce.far." },
    { ".ftext.",   ".gnu.linkonce.ft." },
    { ".frodata.", ".gnu.linkonce.frd." },
    { ".srodata.", ".gnu.linkonce.srd." },
    { ".vtext.",   ".gnu.linkonce.v." },
    { ".vftext.",  ".gnu.linkonce.vf." }
  };
  int sec = 2; /* .data */
  int len;
  const char *name, *prefix;
  char *string;

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  /* Prefer the (possibly region-encoded) rtl symbol name.  */
  if (DECL_RTL (decl))
    name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	sec = 9; /* .vtext */
      else
	sec = 0; /* .text */
    }
  else if (decl_readonly_section (decl, reloc))
    sec = 1; /* .rodata */

  /* Refine the base choice by the "@x." region encoding, then strip
     the encoding from the name used in the section suffix.  */
  if (name[0] == '@' && name[2] == '.')
    {
      switch (name[1])
	{
	case 'b':
	  sec = 3; /* .based */
	  break;
	case 't':
	  if (sec == 1)
	    sec = 8; /* .srodata */
	  else
	    sec = 4; /* .sdata */
	  break;
	case 'f':
	  if (sec == 0)
	    sec = 6; /* .ftext */
	  else if (sec == 9)
	    sec = 10; /* .vftext */
	  else if (sec == 1)
	    sec = 7; /* .frodata */
	  else
	    sec = 5; /* .far. */
	  break;
	}
      name += 3;
    }

  prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
  len = strlen (name) + strlen (prefix);
  string = (char *) alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  DECL_SECTION_NAME (decl) = build_string (len, string);
}
/* Given a decl, a section name, and whether the decl initializer
   has relocs, choose attributes for the section.  */

/* Machine-dependent section flag marking VLIW-mode text sections.  */
#define SECTION_MEP_VLIW SECTION_MACH_DEP
4796 static unsigned int
4797 mep_section_type_flags (tree decl, const char *name, int reloc)
4799 unsigned int flags = default_section_type_flags (decl, name, reloc);
4801 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4802 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4803 flags |= SECTION_MEP_VLIW;
4805 return flags;
4808 /* Switch to an arbitrary section NAME with attributes as specified
4809 by FLAGS. ALIGN specifies any known alignment requirements for
4810 the section; 0 if the default should be used.
4812 Differs from the standard ELF version only in support of VLIW mode. */
4814 static void
4815 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4817 char flagchars[8], *f = flagchars;
4818 const char *type;
4820 if (!(flags & SECTION_DEBUG))
4821 *f++ = 'a';
4822 if (flags & SECTION_WRITE)
4823 *f++ = 'w';
4824 if (flags & SECTION_CODE)
4825 *f++ = 'x';
4826 if (flags & SECTION_SMALL)
4827 *f++ = 's';
4828 if (flags & SECTION_MEP_VLIW)
4829 *f++ = 'v';
4830 *f = '\0';
4832 if (flags & SECTION_BSS)
4833 type = "nobits";
4834 else
4835 type = "progbits";
4837 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4838 name, flagchars, type);
4840 if (flags & SECTION_CODE)
4841 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4842 asm_out_file);
/* Output an aligned common (or local-common) symbol NAME of SIZE bytes
   with alignment ALIGN (in bits) to STREAM.  GLOBAL selects .globl vs
   .local visibility.  Handles the MeP name encodings: "@i."/"@I."/"@c."
   (io/cb objects placed at a fixed address), "@b."/"@t."/"@f." (based,
   tiny and far BSS sections); everything else falls through to a plain
   .comm.  */
void
mep_output_aligned_common (FILE *stream, tree decl, const char *name,
			   int size, int align, int global)
{
  /* We intentionally don't use mep_section_tag() here.  */
  if (name[0] == '@'
      && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
      && name[2] == '.')
    {
      /* io/cb objects are emitted as absolute symbols pinned at the
	 address given by the attribute argument; no storage is
	 reserved.  */
      int location = -1;
      tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
				    DECL_ATTRIBUTES (decl));
      if (attr
	  && TREE_VALUE (attr)
	  && TREE_VALUE (TREE_VALUE(attr)))
	location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
      /* No address given: emit nothing at all for this symbol.  */
      if (location == -1)
	return;
      if (global)
	{
	  fprintf (stream, "\t.globl\t");
	  assemble_name (stream, name);
	  fprintf (stream, "\n");
	}
      assemble_name (stream, name);
      fprintf (stream, " = %d\n", location);
      return;
    }
  if (name[0] == '@' && name[2] == '.')
    {
      const char *sec = 0;
      switch (name[1])
	{
	case 'b':
	  switch_to_section (based_section);
	  sec = ".based";
	  break;
	case 't':
	  switch_to_section (tinybss_section);
	  sec = ".sbss";
	  break;
	case 'f':
	  switch_to_section (farbss_section);
	  sec = ".farbss";
	  break;
	}
      if (sec)
	{
	  const char *name2;
	  int p2align = 0;

	  /* Convert the bit alignment to a power-of-two byte count
	     for .p2align.  */
	  while (align > BITS_PER_UNIT)
	    {
	      align /= 2;
	      p2align ++;
	    }
	  name2 = TARGET_STRIP_NAME_ENCODING (name);
	  if (global)
	    fprintf (stream, "\t.globl\t%s\n", name2);
	  fprintf (stream, "\t.p2align %d\n", p2align);
	  fprintf (stream, "\t.type\t%s,@object\n", name2);
	  fprintf (stream, "\t.size\t%s,%d\n", name2, size);
	  fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
	  return;
	}
    }

  /* Default: a normal common symbol.  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
4923 /* Trampolines. */
/* Initialize a trampoline at ADDR to call FNADDR with static chain
   STATIC_CHAIN.  The actual instruction writing is delegated to the
   runtime helper __mep_trampoline_helper, so this just emits the
   library call.  */
void
mep_init_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
{
  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
		     LCT_NORMAL, VOIDmode, 3,
		     addr, Pmode,
		     fnaddr, Pmode,
		     static_chain, Pmode);
}
4935 /* Experimental Reorg. */
4937 static bool
4938 mep_mentioned_p (rtx in,
4939 rtx reg, /* NULL for mem */
4940 int modes_too) /* if nonzero, modes must match also. */
4942 const char *fmt;
4943 int i;
4944 enum rtx_code code;
4946 if (in == 0)
4947 return false;
4948 if (reg && GET_CODE (reg) != REG)
4949 return false;
4951 if (GET_CODE (in) == LABEL_REF)
4952 return (reg == 0);
4954 code = GET_CODE (in);
4956 switch (code)
4958 case MEM:
4959 if (reg)
4960 return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
4961 return true;
4963 case REG:
4964 if (!reg)
4965 return false;
4966 if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
4967 return false;
4968 return (REGNO (in) == REGNO (reg));
4970 case SCRATCH:
4971 case CC0:
4972 case PC:
4973 case CONST_INT:
4974 case CONST_DOUBLE:
4975 return false;
4977 default:
4978 break;
4981 /* Set's source should be read-only. */
4982 if (code == SET && !reg)
4983 return mep_mentioned_p (SET_DEST (in), reg, modes_too);
4985 fmt = GET_RTX_FORMAT (code);
4987 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4989 if (fmt[i] == 'E')
4991 register int j;
4992 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
4993 if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
4994 return true;
4996 else if (fmt[i] == 'e'
4997 && mep_mentioned_p (XEXP (in, i), reg, modes_too))
4998 return true;
5000 return false;
5003 #define EXPERIMENTAL_REGMOVE_REORG 1
5005 #if EXPERIMENTAL_REGMOVE_REORG
/* Return nonzero when hard registers R1 and R2 live in the same
   register file (both general registers or both coprocessor
   registers), so a move between them is a candidate for the
   regmove cleanup below.  */
static int
mep_compatible_reg_class (int r1, int r2)
{
  return ((GR_REGNO_P (r1) && GR_REGNO_P (r2))
	  || (CR_REGNO_P (r1) && CR_REGNO_P (r2)));
}
/* Delete superfluous register-to-register moves from the insn stream
   INSNS.  Looks for (set r2 r1) where r1 dies, followed by a set that
   uses r2 where r2 dies; substitutes r1 into that later set and, if
   the result is still recognizable, deletes the move.  Iterates to a
   fixed point since one deletion can expose another.  */
static void
mep_reorg_regmove (rtx insns)
{
  rtx insn, next, pat, follow, *where;
  int count = 0, done = 0, replace, before = 0;

  /* BEFORE is only used for dump statistics.  */
  if (dump_file)
    for (insn = insns; insn; insn = NEXT_INSN (insn))
      if (GET_CODE (insn) == INSN)
	before++;

  /* We're looking for (set r2 r1) moves where r1 dies, followed by a
     set that uses the r2 and r2 dies there.  We replace r2 with r1
     and see if it's still a valid insn.  If so, delete the first set.
     Copied from reorg.c.  */

  while (!done)
    {
      done = 1;
      for (insn = insns; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) != INSN)
	    continue;
	  pat = PATTERN (insn);

	  replace = 0;

	  if (GET_CODE (pat) == SET
	      && GET_CODE (SET_SRC (pat)) == REG
	      && GET_CODE (SET_DEST (pat)) == REG
	      && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
	      && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
	    {
	      follow = next_nonnote_insn (insn);
	      if (dump_file)
		fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));

	      /* Skip forward over sets that touch neither register of
		 the move; stop at the first insn that mentions one of
		 them (or anything that isn't a simple SET).  */
	      while (follow && GET_CODE (follow) == INSN
		     && GET_CODE (PATTERN (follow)) == SET
		     && !dead_or_set_p (follow, SET_SRC (pat))
		     && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
		     && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
		{
		  if (dump_file)
		    fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
		  follow = next_nonnote_insn (follow);
		}

	      if (dump_file)
		fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
	      if (follow && GET_CODE (follow) == INSN
		  && GET_CODE (PATTERN (follow)) == SET
		  && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
		{
		  /* WHERE is the sub-rtx of FOLLOW inside which the
		     substitution will be validated.  */
		  if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
		    {
		      if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
			{
			  replace = 1;
			  where = & SET_SRC (PATTERN (follow));
			}
		    }
		  else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
		    {
		      if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
			{
			  replace = 1;
			  where = & PATTERN (follow);
			}
		    }
		}
	    }

	  /* If so, follow is the corresponding insn */
	  if (replace)
	    {
	      if (dump_file)
		{
		  rtx x;

		  fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
		  for (x = insn; x ;x = NEXT_INSN (x))
		    {
		      print_rtl_single (dump_file, x);
		      if (x == follow)
			break;
		      fprintf (dump_file, "\n");
		    }
		}

	      /* Only delete the move if the substituted FOLLOW still
		 recognizes; validate_replace_rtx_subexp backs out the
		 change otherwise.  */
	      if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
					       follow, where))
		{
		  count ++;
		  next = delete_insn (insn);
		  if (dump_file)
		    {
		      fprintf (dump_file, "\n----- Success! new insn:\n\n");
		      print_rtl_single (dump_file, follow);
		    }
		  done = 0;
		}
	    }
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
      fprintf (dump_file, "=====\n");
    }
}
5130 #endif
/* Figure out where to put LABEL, which is the label for a repeat loop.
   If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
   the loop ends just before LAST_INSN.  If SHARED, insns other than the
   "repeat" might use LABEL to jump to the loop's continuation point.

   Return the last instruction in the adjusted loop.

   The MeP repeat label must cover the last two opcode slots of the
   loop, so this walks backwards trying to pull up to two suitable
   insns into the "repeat epilogue", padding with nops when fewer than
   two are safe to include.  */

static rtx
mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
			      bool shared)
{
  rtx next, prev;
  int count = 0, code, icode;

  if (dump_file)
    fprintf (dump_file, "considering end of repeat loop at insn %d\n",
	     INSN_UID (last_insn));

  /* Set PREV to the last insn in the loop.  */
  prev = last_insn;
  if (!including)
    prev = PREV_INSN (prev);

  /* Set NEXT to the next insn after the repeat label.  */
  next = last_insn;
  if (!shared)
    while (prev != 0)
      {
	code = GET_CODE (prev);
	/* These may not sit inside the repeat epilogue at all.  */
	if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
	  break;

	if (INSN_P (prev))
	  {
	    if (GET_CODE (PATTERN (prev)) == SEQUENCE)
	      prev = XVECEXP (PATTERN (prev), 0, 1);

	    /* Other insns that should not be in the last two opcodes.  */
	    icode = recog_memoized (prev);
	    if (icode < 0
		|| icode == CODE_FOR_repeat
		|| icode == CODE_FOR_erepeat
		|| get_attr_may_trap (prev) == MAY_TRAP_YES)
	      break;

	    /* That leaves JUMP_INSN and INSN.  It will have BImode if it
	       is the second instruction in a VLIW bundle.  In that case,
	       loop again: if the first instruction also satisfies the
	       conditions above then we will reach here again and put
	       both of them into the repeat epilogue.  Otherwise both
	       should remain outside.  */
	    if (GET_MODE (prev) != BImode)
	      {
		count++;
		next = prev;
		if (dump_file)
		  print_rtl_single (dump_file, next);
		if (count == 2)
		  break;
	      }
	  }
	prev = PREV_INSN (prev);
      }

  /* See if we're adding the label immediately after the repeat insn.
     If so, we need to separate them with a nop.  */
  prev = prev_real_insn (next);
  if (prev)
    switch (recog_memoized (prev))
      {
      case CODE_FOR_repeat:
      case CODE_FOR_erepeat:
	if (dump_file)
	  fprintf (dump_file, "Adding nop inside loop\n");
	emit_insn_before (gen_nop (), next);
	break;

      default:
	break;
      }

  /* Insert the label.  */
  emit_label_before (label, next);

  /* Insert the nops.  */
  if (dump_file && count < 2)
    fprintf (dump_file, "Adding %d nop%s\n\n",
	     2 - count, count == 1 ? "" : "s");

  for (; count < 2; count++)
    if (including)
      last_insn = emit_insn_after (gen_nop (), last_insn);
    else
      emit_insn_before (gen_nop (), last_insn);

  return last_insn;
}
5232 void
5233 mep_emit_doloop (rtx *operands, int is_end)
5235 rtx tag;
5237 if (cfun->machine->doloop_tags == 0
5238 || cfun->machine->doloop_tag_from_end == is_end)
5240 cfun->machine->doloop_tags++;
5241 cfun->machine->doloop_tag_from_end = is_end;
5244 tag = GEN_INT (cfun->machine->doloop_tags - 1);
5245 if (is_end)
5246 emit_jump_insn (gen_doloop_end_internal (operands[0], operands[4], tag));
5247 else
5248 emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
5252 /* Code for converting doloop_begins and doloop_ends into valid
5253 MeP instructions. A doloop_begin is just a placeholder:
5255 $count = unspec ($count)
5257 where $count is initially the number of iterations - 1.
5258 doloop_end has the form:
5260 if ($count-- == 0) goto label
5262 The counter variable is private to the doloop insns, nothing else
5263 relies on its value.
5265 There are three cases, in decreasing order of preference:
5267 1. A loop has exactly one doloop_begin and one doloop_end.
5268 The doloop_end branches to the first instruction after
5269 the doloop_begin.
5271 In this case we can replace the doloop_begin with a repeat
5272 instruction and remove the doloop_end. I.e.:
5274 $count1 = unspec ($count1)
5275 label:
5277 insn1
5278 insn2
5279 if ($count2-- == 0) goto label
5281 becomes:
5283 repeat $count1,repeat_label
5284 label:
5286 repeat_label:
5287 insn1
5288 insn2
5289 # end repeat
5291 2. As for (1), except there are several doloop_ends. One of them
5292 (call it X) falls through to a label L. All the others fall
5293 through to branches to L.
5295 In this case, we remove X and replace the other doloop_ends
5296 with branches to the repeat label. For example:
5298 $count1 = unspec ($count1)
5299 start:
5301 if ($count2-- == 0) goto label
5302 end:
5304 if ($count3-- == 0) goto label
5305 goto end
5307 becomes:
5309 repeat $count1,repeat_label
5310 start:
5312 repeat_label:
5315 # end repeat
5316 end:
5318 goto repeat_label
5320 3. The fallback case. Replace doloop_begins with:
5322 $count = $count + 1
5324 Replace doloop_ends with the equivalent of:
5326 $count = $count - 1
5327 if ($count == 0) goto label
5329 Note that this might need a scratch register if $count
5330 is stored in memory. */
/* A structure describing one doloop_begin.  These records are
   allocated with alloca in mep_reorg_repeat and live only for the
   duration of that pass.  */
struct mep_doloop_begin {
  /* The next doloop_begin with the same tag.  */
  struct mep_doloop_begin *next;

  /* The instruction itself.  */
  rtx insn;

  /* The initial counter value.  This is known to be a general register.  */
  rtx counter;
};

/* A structure describing a doloop_end.  */
struct mep_doloop_end {
  /* The next doloop_end with the same loop tag.  */
  struct mep_doloop_end *next;

  /* The instruction itself.  */
  rtx insn;

  /* The first instruction after INSN when the branch isn't taken.  */
  rtx fallthrough;

  /* The location of the counter value.  Since doloop_end_internal is a
     jump instruction, it has to allow the counter to be stored anywhere
     (any non-fixed register or memory location).  */
  rtx counter;

  /* The target label (the place where the insn branches when the counter
     isn't zero).  */
  rtx label;

  /* A scratch register.  Only available when COUNTER isn't stored
     in a general register.  */
  rtx scratch;
};


/* One do-while loop.  */
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
/* Return true if LOOP can be converted into repeat/repeat_end form
   (that is, if it matches cases (1) or (2) above).  */

static bool
mep_repeat_loop_p (struct mep_doloop *loop)
{
  struct mep_doloop_end *end;
  rtx fallthrough;

  /* There must be exactly one doloop_begin and at least one doloop_end.  */
  if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
    return false;

  /* The first doloop_end (X) must branch back to the insn after
     the doloop_begin.  */
  if (prev_real_insn (loop->end->label) != loop->begin->insn)
    return false;

  /* All the other doloop_ends must branch to the same place as X.
     When the branch isn't taken, they must jump to the instruction
     after X.  */
  fallthrough = loop->end->fallthrough;
  for (end = loop->end->next; end != 0; end = end->next)
    if (end->label != loop->end->label
	|| !simplejump_p (end->fallthrough)
	|| next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
      return false;

  return true;
}
/* The main repeat reorg function.  See comment above for details.
   Groups the doloop_begin/doloop_end placeholders in INSNS by loop
   tag, converts convertible loops (cases 1 and 2 above) to
   repeat/repeat_end form, and expands the rest (case 3) into explicit
   decrement-and-branch sequences.  */

static void
mep_reorg_repeat (rtx insns)
{
  rtx insn;
  struct mep_doloop *loops, *loop;
  struct mep_doloop_begin *begin;
  struct mep_doloop_end *end;

  /* Quick exit if we haven't created any loops.  */
  if (cfun->machine->doloop_tags == 0)
    return;

  /* Create an array of mep_doloop structures.  */
  loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
  memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

  /* Search the function for do-while insns and group them by loop tag.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      switch (recog_memoized (insn))
	{
	case CODE_FOR_doloop_begin_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
	  begin->next = loop->begin;
	  begin->insn = insn;
	  begin->counter = recog_data.operand[0];

	  loop->begin = begin;
	  break;

	case CODE_FOR_doloop_end_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
	  end->insn = insn;
	  end->fallthrough = next_real_insn (insn);
	  end->counter = recog_data.operand[0];
	  end->label = recog_data.operand[1];
	  end->scratch = recog_data.operand[3];

	  /* If this insn falls through to an unconditional jump,
	     give it a lower priority than the others.  */
	  if (loop->end != 0 && simplejump_p (end->fallthrough))
	    {
	      end->next = loop->end->next;
	      loop->end->next = end;
	    }
	  else
	    {
	      end->next = loop->end;
	      loop->end = end;
	    }
	  break;
	}

  /* Convert the insns for each loop in turn.  */
  for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
    if (mep_repeat_loop_p (loop))
      {
	/* Case (1) or (2).  */
	rtx repeat_label, label_ref;

	/* Create a new label for the repeat insn.  */
	repeat_label = gen_label_rtx ();

	/* Replace the doloop_begin with a repeat.  */
	label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
	emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
			  loop->begin->insn);
	delete_insn (loop->begin->insn);

	/* Insert the repeat label before the first doloop_end.
	   Fill the gap with nops if there are other doloop_ends.  */
	mep_insert_repeat_label_last (loop->end->insn, repeat_label,
				      false, loop->end->next != 0);

	/* Emit a repeat_end (to improve the readability of the output).  */
	emit_insn_before (gen_repeat_end (), loop->end->insn);

	/* Delete the first doloop_end.  */
	delete_insn (loop->end->insn);

	/* Replace the others with branches to REPEAT_LABEL.  */
	for (end = loop->end->next; end != 0; end = end->next)
	  {
	    emit_jump_insn_before (gen_jump (repeat_label), end->insn);
	    delete_insn (end->insn);
	    delete_insn (end->fallthrough);
	  }
      }
    else
      {
	/* Case (3).  First replace all the doloop_begins with increment
	   instructions.  */
	for (begin = loop->begin; begin != 0; begin = begin->next)
	  {
	    emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
					     begin->counter, const1_rtx),
			      begin->insn);
	    delete_insn (begin->insn);
	  }

	/* Replace all the doloop_ends with decrement-and-branch sequences.  */
	for (end = loop->end; end != 0; end = end->next)
	  {
	    rtx reg;

	    start_sequence ();

	    /* Load the counter value into a general register.  */
	    reg = end->counter;
	    if (!REG_P (reg) || REGNO (reg) > 15)
	      {
		reg = end->scratch;
		emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
	      }

	    /* Decrement the counter.  */
	    emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
				      constm1_rtx));

	    /* Copy it back to its original location.  */
	    if (reg != end->counter)
	      emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

	    /* Jump back to the start label.  */
	    insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
						     end->label));
	    JUMP_LABEL (insn) = end->label;
	    LABEL_NUSES (end->label)++;

	    /* Emit the whole sequence before the doloop_end.  */
	    insn = get_insns ();
	    end_sequence ();
	    emit_insn_before (insn, end->insn);

	    /* Delete the doloop_end.  */
	    delete_insn (end->insn);
	  }
      }
}
/* Return true if INSN is a conditional branch whose condition
   (EQ/NE/LT/GE) can be inverted and still be recognized.  The test is
   destructive-but-restoring: the condition code is flipped in place,
   recog is tried, and the original code is put back before
   returning.  INSN_CODE is invalidated both times so a later recog
   re-checks the (restored) insn.  */
static bool
mep_invertable_branch_p (rtx insn)
{
  rtx cond, set;
  enum rtx_code old_code;
  int i;

  set = PATTERN (insn);
  if (GET_CODE (set) != SET)
    return false;
  if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
    return false;
  cond = XEXP (XEXP (set, 1), 0);
  old_code = GET_CODE (cond);
  switch (old_code)
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      /* Other comparison codes are not invertable here.  */
      return false;
    }
  /* Try to recognize the inverted form, then restore the original.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  PUT_CODE (cond, old_code);
  INSN_CODE (insn) = -1;
  return i >= 0;
}
/* Invert the sense of conditional branch INSN and retarget it at a
   new label emitted just after AFTER.  The old target is deleted when
   this branch was its only user.  INSN must satisfy
   mep_invertable_branch_p; anything else aborts.  */
static void
mep_invert_branch (rtx insn, rtx after)
{
  rtx cond, set, label;
  int i;

  set = PATTERN (insn);

  gcc_assert (GET_CODE (set) == SET);
  gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

  cond = XEXP (XEXP (set, 1), 0);
  switch (GET_CODE (cond))
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      gcc_unreachable ();
    }
  label = gen_label_rtx ();
  emit_label_after (label, after);
  /* Redirect whichever arm of the IF_THEN_ELSE holds the label ref
     (arm 1 or 2) at the new label.  */
  for (i=1; i<=2; i++)
    if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
      {
	rtx ref = XEXP (XEXP (set, 1), i);
	if (LABEL_NUSES (XEXP (ref, 0)) == 1)
	  delete_insn (XEXP (ref, 0));
	XEXP (ref, 0) = label;
	LABEL_NUSES (label) ++;
	JUMP_LABEL (insn) = label;
      }
  /* The modified insn must still be recognizable.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  gcc_assert (i >= 0);
}
/* Convert suitable backward conditional/unconditional branches in
   INSNS into erepeat loops.  For each invertable branch, scan
   backwards looking for its own target label (the loop top); if the
   label is provably used only by this loop, emit an erepeat after the
   label, insert the erepeat end label via
   mep_insert_repeat_label_last, and delete or invert the original
   branch.  */
static void
mep_reorg_erepeat (rtx insns)
{
  rtx insn, prev, label_before, l, x;
  int count;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	&& ! JUMP_TABLE_DATA_P (insn)
	&& mep_invertable_branch_p (insn))
      {
	if (dump_file)
	  {
	    fprintf (dump_file, "\n------------------------------\n");
	    fprintf (dump_file, "erepeat: considering this jump:\n");
	    print_rtl_single (dump_file, insn);
	  }
	/* A conditional branch itself occupies one slot of the loop.  */
	count = simplejump_p (insn) ? 0 : 1;
	label_before = 0;
	for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
	  {
	    /* Calls and barriers make the region unusable.  */
	    if (GET_CODE (prev) == CALL_INSN
		|| BARRIER_P (prev))
	      break;

	    if (prev == JUMP_LABEL (insn))
	      {
		rtx newlast;
		if (dump_file)
		  fprintf (dump_file, "found loop top, %d insns\n", count);

		if (LABEL_NUSES (prev) == 1)
		  /* We're the only user, always safe */ ;
		else if (LABEL_NUSES (prev) == 2)
		  {
		    /* See if there's a barrier before this label.  If
		       so, we know nobody inside the loop uses it.
		       But we must be careful to put the erepeat
		       *after* the label.  */
		    rtx barrier;
		    for (barrier = PREV_INSN (prev);
			 barrier && GET_CODE (barrier) == NOTE;
			 barrier = PREV_INSN (barrier))
		      ;
		    if (barrier && GET_CODE (barrier) != BARRIER)
		      break;
		  }
		else
		  {
		    /* We don't know who else, within or without our loop, uses this */
		    if (dump_file)
		      fprintf (dump_file, "... but there are multiple users, too risky.\n");
		    break;
		  }

		/* Generate a label to be used by the erepeat insn.  */
		l = gen_label_rtx ();

		/* Insert the erepeat after INSN's target label.  */
		x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
		LABEL_NUSES (l)++;
		emit_insn_after (x, prev);

		/* Insert the erepeat label.  */
		newlast = (mep_insert_repeat_label_last
			   (insn, l, !simplejump_p (insn), false));
		if (simplejump_p (insn))
		  {
		    /* Unconditional loop branch: the erepeat replaces
		       it entirely.  */
		    emit_insn_before (gen_erepeat_end (), insn);
		    delete_insn (insn);
		  }
		else
		  {
		    /* Conditional branch: invert it so it exits the
		       loop, and keep it after the erepeat end.  */
		    mep_invert_branch (insn, newlast);
		    emit_insn_after (gen_erepeat_end (), newlast);
		  }
		break;
	      }

	    if (LABEL_P (prev))
	      {
		/* A label is OK if there is exactly one user, and we
		   can find that user before the next label.  */
		rtx user = 0;
		int safe = 0;
		if (LABEL_NUSES (prev) == 1)
		  {
		    for (user = PREV_INSN (prev);
			 user && (INSN_P (user) || GET_CODE (user) == NOTE);
			 user = PREV_INSN (user))
		      if (GET_CODE (user) == JUMP_INSN
			  && JUMP_LABEL (user) == prev)
			{
			  safe = INSN_UID (user);
			  break;
			}
		  }
		if (!safe)
		  break;
		if (dump_file)
		  fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
			   safe, INSN_UID (prev));
	      }

	    if (INSN_P (prev))
	      {
		count ++;
		/* NOTE(review): LABEL_BEFORE is assigned here but never
		   read in this function — looks vestigial; confirm
		   before removing.  */
		if (count == 2)
		  label_before = prev;
	      }
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n==============================\n");
}
/* Replace a jump to a return, with a copy of the return.  GCC doesn't
   always do this on its own.  */

static void
mep_jmp_return_reorg (rtx insns)
{
  rtx insn, label, ret;
  int ret_code;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (simplejump_p (insn))
      {
	/* Find the first real insn the jump jumps to.  */
	label = ret = JUMP_LABEL (insn);
	/* NOTE(review): PATTERN is only reached for codes other than
	   NOTE/CODE_LABEL — presumably nothing pattern-less (e.g. a
	   BARRIER) can follow the label here; confirm.  */
	while (ret
	       && (GET_CODE (ret) == NOTE
		   || GET_CODE (ret) == CODE_LABEL
		   || GET_CODE (PATTERN (ret)) == USE))
	  ret = NEXT_INSN (ret);

	if (ret)
	  {
	    /* Is it a return?  */
	    ret_code = recog_memoized (ret);
	    if (ret_code == CODE_FOR_return_internal
		|| ret_code == CODE_FOR_eh_return_internal)
	      {
		/* It is.  Replace the jump with a return.  */
		LABEL_NUSES (label) --;
		if (LABEL_NUSES (label) == 0)
		  delete_insn (label);
		PATTERN (insn) = copy_rtx (PATTERN (ret));
		INSN_CODE (insn) = -1;
	      }
	  }
      }
}
/* Combine pairs of adjacent "rN = rN + const" insns into one add when
   the combined constant still fits in a signed 16-bit immediate.  The
   second insn is unlinked from the chain by hand (its constant is
   folded into the first).  */
static void
mep_reorg_addcombine (rtx insns)
{
  rtx i, n;

  for (i = insns; i; i = NEXT_INSN (i))
    if (INSN_P (i)
	&& INSN_CODE (i) == CODE_FOR_addsi3
	&& GET_CODE (SET_DEST (PATTERN (i))) == REG
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
	&& REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
      {
	n = NEXT_INSN (i);
	if (INSN_P (n)
	    && INSN_CODE (n) == CODE_FOR_addsi3
	    && GET_CODE (SET_DEST (PATTERN (n))) == REG
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
	    && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
	  {
	    int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
	    int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
	    /* Both adds must target the same register and the sum must
	       stay within the 16-bit immediate range.  */
	    if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
		&& ic + nc < 32767
		&& ic + nc > -32768)
	      {
		XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
		/* Splice N out of the insn chain.  */
		NEXT_INSN (i) = NEXT_INSN (n);
		if (NEXT_INSN (i))
		  PREV_INSN (NEXT_INSN (i)) = i;
	      }
	  }
      }
}
/* If this insn adjusts the stack, return the adjustment, else return
   zero.  The insn must be a single set of the form
   $sp = $sp + const_int; any other shape returns 0.  Note that a
   genuine zero-byte adjustment is indistinguishable from "not an
   sp adjust" to callers.  */
static int
add_sp_insn_p (rtx insn)
{
  rtx pat;

  if (! single_set (insn))
    return 0;
  pat = PATTERN (insn);
  if (GET_CODE (SET_DEST (pat)) != REG)
    return 0;
  if (REGNO (SET_DEST (pat)) != SP_REGNO)
    return 0;
  if (GET_CODE (SET_SRC (pat)) != PLUS)
    return 0;
  if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
    return 0;
  if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
    return 0;
  if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
    return 0;
  return INTVAL (XEXP (SET_SRC (pat), 1));
}
/* Check for trivial functions that set up an unneeded stack
   frame.  If the function consists of exactly one $sp -= N at the
   start, a matching $sp += N at the end, and nothing in between that
   mentions $sp or makes a call, delete both adjustments.  */
static void
mep_reorg_noframe (rtx insns)
{
  rtx start_frame_insn;
  rtx end_frame_insn = 0;
  int sp_adjust, sp2;
  rtx sp;

  /* The first insn should be $sp = $sp + N */
  while (insns && ! INSN_P (insns))
    insns = NEXT_INSN (insns);
  if (!insns)
    return;

  sp_adjust = add_sp_insn_p (insns);
  if (sp_adjust == 0)
    return;

  start_frame_insn = insns;
  sp = SET_DEST (PATTERN (start_frame_insn));

  insns = next_real_insn (insns);

  /* Scan the body.  Any second sp adjustment, any other mention of
     $sp, or any call disqualifies the function.  The final insn
     (with no successor) is deliberately not inspected — it is the
     return.  */
  while (insns)
    {
      rtx next = next_real_insn (insns);
      if (!next)
	break;

      sp2 = add_sp_insn_p (insns);
      if (sp2)
	{
	  if (end_frame_insn)
	    return;
	  end_frame_insn = insns;
	  if (sp2 != -sp_adjust)
	    return;
	}
      else if (mep_mentioned_p (insns, sp, 0))
	return;
      else if (CALL_P (insns))
	return;

      insns = next;
    }

  if (end_frame_insn)
    {
      delete_insn (start_frame_insn);
      delete_insn (end_frame_insn);
    }
}
/* Implement TARGET_MACHINE_DEPENDENT_REORG: run the MeP-specific
   late RTL cleanups in dependency order.  Bundling must precede the
   repeat conversions; the no-frame cleanup runs last because it can
   delete the first insn of the chain.  */
static void
mep_reorg (void)
{
  rtx insns = get_insns ();

  /* We require accurate REG_DEAD notes.  */
  compute_bb_for_insn ();
  df_note_add_problem ();
  df_analyze ();

  mep_reorg_addcombine (insns);
#if EXPERIMENTAL_REGMOVE_REORG
  /* VLIW packing has been done already, so we can't just delete things.  */
  if (!mep_vliw_function_p (cfun->decl))
    mep_reorg_regmove (insns);
#endif
  mep_jmp_return_reorg (insns);
  mep_bundle_insns (insns);
  mep_reorg_repeat (insns);
  /* erepeat uses the RPB/RPE/RPC registers, so it is unsafe in
     profiled code and in interrupt handlers that don't save RPB.  */
  if (optimize
      && !profile_flag
      && !profile_arc_flag
      && TARGET_OPT_REPEAT
      && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
    mep_reorg_erepeat (insns);

  /* This may delete *insns so make sure it's last.  */
  mep_reorg_noframe (insns);

  df_finish_pass (false);
}
5951 /*----------------------------------------------------------------------*/
5952 /* Builtins */
5953 /*----------------------------------------------------------------------*/
/* Element X gives the index into cgen_insns[] of the most general
   implementation of intrinsic X.  Unimplemented intrinsics are
   mapped to -1.  Filled in by mep_init_intrinsics.  */
int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

/* Element X gives the index of another instruction that is mapped to
   the same intrinsic as cgen_insns[X].  It is -1 when there is no other
   instruction.

   Things are set up so that mep_intrinsic_chain[X] < X.  */
static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

/* The bitmask for the current ISA.  The ISA masks are declared
   in mep-intrin.h.  */
unsigned int mep_selected_isa;

/* Maps a -mconfig= name to the ISA bitmask it selects.  */
struct mep_config {
  const char *config_name;
  unsigned int isa;
};

/* Table of known configurations, terminated by a null entry.  Entry 0
   is the default configuration.  */
static struct mep_config mep_configs[] = {
#ifdef COPROC_SELECTION_TABLE
  COPROC_SELECTION_TABLE,
#endif
  { 0, 0 }
};
5983 /* Initialize the global intrinsics variables above. */
/* Initialize the global intrinsics variables above.  Selects the ISA
   from -mconfig= (mep_config_string), builds the intrinsic-to-insn
   tables, and probes for core/coprocessor move support.  */
static void
mep_init_intrinsics (void)
{
  size_t i;

  /* Set MEP_SELECTED_ISA to the ISA flag for this configuration.  */
  mep_selected_isa = mep_configs[0].isa;
  if (mep_config_string != 0)
    for (i = 0; mep_configs[i].config_name; i++)
      if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
	{
	  mep_selected_isa = mep_configs[i].isa;
	  break;
	}

  /* Assume all intrinsics are unavailable.  */
  for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
    mep_intrinsic_insn[i] = -1;

  /* Build up the global intrinsic tables.  Iterating in increasing
     order keeps mep_intrinsic_chain[X] < X.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].isas & mep_selected_isa) != 0)
      {
	mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
	mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
      }

  /* See whether we can directly move values between one coprocessor
     register and another.  */
  for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
    if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
      mep_have_copro_copro_moves_p = true;

  /* See whether we can directly move values between core and
     coprocessor registers.  */
  mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
				 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));

  /* NOTE(review): this unconditionally overrides the computed value
     above, making the cmov1/cmov2 probe dead — looks like a debugging
     leftover; confirm whether the override is still intended.  */
  mep_have_core_copro_moves_p = 1;
}
/* Declare all available intrinsic functions.  Called once only.  */

/* Cached type nodes for the intrinsic argument/return types; built by
   mep_init_builtins and used by mep_cgen_regnum_to_type.  */
static tree cp_data_bus_int_type_node;   /* width depends on TARGET_64BIT_CR_REGS */
static tree opaque_vector_type_node;     /* generic 64-bit opaque vector */
static tree v8qi_type_node;
static tree v4hi_type_node;
static tree v2si_type_node;
static tree v8uqi_type_node;
static tree v4uhi_type_node;
static tree v2usi_type_node;
/* Map a cgen register-operand type CR onto the tree type node used
   when declaring the corresponding intrinsic parameter or result.
   Unknown operand types map to void.  */
static tree
mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
{
  switch (cr)
    {
    case cgen_regnum_operand_type_POINTER:	return ptr_type_node;
    case cgen_regnum_operand_type_LONG:		return long_integer_type_node;
    case cgen_regnum_operand_type_ULONG:	return long_unsigned_type_node;
    case cgen_regnum_operand_type_SHORT:	return short_integer_type_node;
    case cgen_regnum_operand_type_USHORT:	return short_unsigned_type_node;
    case cgen_regnum_operand_type_CHAR:		return char_type_node;
    case cgen_regnum_operand_type_UCHAR:	return unsigned_char_type_node;
    case cgen_regnum_operand_type_SI:		return intSI_type_node;
    case cgen_regnum_operand_type_DI:		return intDI_type_node;
    case cgen_regnum_operand_type_VECTOR:	return opaque_vector_type_node;
    case cgen_regnum_operand_type_V8QI:		return v8qi_type_node;
    case cgen_regnum_operand_type_V4HI:		return v4hi_type_node;
    case cgen_regnum_operand_type_V2SI:		return v2si_type_node;
    case cgen_regnum_operand_type_V8UQI:	return v8uqi_type_node;
    case cgen_regnum_operand_type_V4UHI:	return v4uhi_type_node;
    case cgen_regnum_operand_type_V2USI:	return v2usi_type_node;
    case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
    default:
      return void_type_node;
    }
}
/* Build the intrinsic type nodes, publish them to the language under
   their user-visible names, and declare one builtin function per
   UNSPEC-form intrinsic.  Runs once, as the init-builtins hook.  */

static void
mep_init_builtins (void)
{
  size_t i;

  /* cp_data_bus_int matches the width of a coprocessor register.  */
  if (TARGET_64BIT_CR_REGS)
    cp_data_bus_int_type_node = long_long_integer_type_node;
  else
    cp_data_bus_int_type_node = long_integer_type_node;

  opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
  v8qi_type_node = build_vector_type (intQI_type_node, 8);
  v4hi_type_node = build_vector_type (intHI_type_node, 4);
  v2si_type_node = build_vector_type (intSI_type_node, 2);
  v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
  v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
  v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);

  /* Make the types visible to user code under their cp_* names.  */
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_data_bus_int"),
		 cp_data_bus_int_type_node));

  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_vector"),
		 opaque_vector_type_node));

  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8qi"),
		 v8qi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4hi"),
		 v4hi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2si"),
		 v2si_type_node));

  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8uqi"),
		 v8uqi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4uhi"),
		 v4uhi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2usi"),
		 v2usi_type_node));

  /* Intrinsics like mep_cadd3 are implemented with two groups of
     instructions, one which uses UNSPECs and one which uses a specific
     rtl code such as PLUS.  Instructions in the latter group belong
     to GROUP_KNOWN_CODE.

     In such cases, the intrinsic will have two entries in the global
     tables above.  The unspec form is accessed using builtin functions
     while the specific form is accessed using the mep_* enum in
     mep-intrin.h.

     The idea is that __cop arithmetic and builtin functions have
     different optimization requirements.  If mep_cadd3() appears in
     the source code, the user will surely expect gcc to use cadd3
     rather than a work-alike such as add3.  However, if the user
     just writes "a + b", where a or b are __cop variables, it is
     reasonable for gcc to choose a core instruction rather than
     cadd3 if it believes that is more optimal.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
	&& mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
      {
	tree ret_type = void_type_node;
	tree bi_type;

	/* Declare each intrinsic only once, even when several table
	   entries implement it.  */
	if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
	  continue;

	if (cgen_insns[i].cret_p)
	  ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);

	/* NOTE(review): the 0 argument list leaves the builtin
	   unprototyped; argument checking happens at expansion time
	   in mep_expand_builtin — confirm this is intended.  */
	bi_type = build_function_type (ret_type, 0);
	add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
			      bi_type,
			      cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
      }
}
6146 /* Report the unavailablity of the given intrinsic. */
6148 #if 1
6149 static void
6150 mep_intrinsic_unavailable (int intrinsic)
6152 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6154 if (already_reported_p[intrinsic])
6155 return;
6157 if (mep_intrinsic_insn[intrinsic] < 0)
6158 error ("coprocessor intrinsic %qs is not available in this configuration",
6159 cgen_intrinsics[intrinsic]);
6160 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6161 error ("%qs is not available in VLIW functions",
6162 cgen_intrinsics[intrinsic]);
6163 else
6164 error ("%qs is not available in non-VLIW functions",
6165 cgen_intrinsics[intrinsic]);
6167 already_reported_p[intrinsic] = 1;
6169 #endif
6172 /* See if any implementation of INTRINSIC is available to the
6173 current function. If so, store the most general implementation
6174 in *INSN_PTR and return true. Return false otherwise. */
6176 static bool
6177 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6179 int i;
6181 i = mep_intrinsic_insn[intrinsic];
6182 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6183 i = mep_intrinsic_chain[i];
6185 if (i >= 0)
6187 *insn_ptr = &cgen_insns[i];
6188 return true;
6190 return false;
6194 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6195 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6196 try using a work-alike instead. In this case, the returned insn
6197 may have three operands rather than two. */
6199 static bool
6200 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6202 size_t i;
6204 if (intrinsic == mep_cmov)
6206 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6207 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6208 return true;
6209 return false;
6211 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6215 /* If ARG is a register operand that is the same size as MODE, convert it
6216 to MODE using a subreg. Otherwise return ARG as-is. */
6218 static rtx
6219 mep_convert_arg (enum machine_mode mode, rtx arg)
6221 if (GET_MODE (arg) != mode
6222 && register_operand (arg, VOIDmode)
6223 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6224 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6225 return arg;
6229 /* Apply regnum conversions to ARG using the description given by REGNUM.
6230 Return the new argument on success and null on failure. */
6232 static rtx
6233 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6235 if (regnum->count == 0)
6236 return arg;
6238 if (GET_CODE (arg) != CONST_INT
6239 || INTVAL (arg) < 0
6240 || INTVAL (arg) >= regnum->count)
6241 return 0;
6243 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
/* Try to make intrinsic argument ARG match the given operand.
   UNSIGNED_P is true if the argument has an unsigned type.
   Returns the legitimized rtx, or 0 if ARG cannot be made to fit.  */

static rtx
mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
		    int unsigned_p)
{
  if (GET_CODE (arg) == CONST_INT)
    {
      /* CONST_INTs can only be bound to integer operands.  */
      if (GET_MODE_CLASS (operand->mode) != MODE_INT)
	return 0;
    }
  else if (GET_CODE (arg) == CONST_DOUBLE)
    /* These hold vector constants.  */;
  else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
    {
      /* If the argument is a different size from what's expected, we must
	 have a value in the right mode class in order to convert it.  */
      if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
	return 0;

      /* If the operand is an rvalue, promote or demote it to match the
	 operand's size.  This might not need extra instructions when
	 ARG is a register value.  */
      if (operand->constraint[0] != '=')
	arg = convert_to_mode (operand->mode, arg, unsigned_p);
    }

  /* If the operand is an lvalue, bind the operand to a new register.
     The caller will copy this value into ARG after the main
     instruction.  By doing this always, we produce slightly more
     optimal code.  */
  /* But not for control registers.  */
  if (operand->constraint[0] == '='
      && (! REG_P (arg)
	  || ! (CONTROL_REGNO_P (REGNO (arg))
		|| CCR_REGNO_P (REGNO (arg))
		|| CR_REGNO_P (REGNO (arg)))))
    return gen_reg_rtx (operand->mode);

  /* Try simple mode punning.  */
  arg = mep_convert_arg (operand->mode, arg);
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* See if forcing the argument into a register will make it match.  */
  if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
    arg = force_reg (operand->mode, arg);
  else
    arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
  if (operand->predicate (arg, operand->mode))
    return arg;

  return 0;
}
/* Report that ARG cannot be passed to argument ARGNUM of intrinsic
   function FNNAME.  OPERAND describes the operand to which ARGNUM
   is mapped.  Emits exactly one error; for immediate operands the
   message explains the valid range or alignment.  */

static void
mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
		      int argnum, tree fnname)
{
  size_t i;

  /* For constant arguments bound to known immediate predicates we can
     give a precise range/alignment diagnostic.  */
  if (GET_CODE (arg) == CONST_INT)
    for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
      if (operand->predicate == cgen_immediate_predicates[i].predicate)
	{
	  const struct cgen_immediate_predicate *predicate;
	  HOST_WIDE_INT argval;

	  predicate = &cgen_immediate_predicates[i];
	  argval = INTVAL (arg);
	  if (argval < predicate->lower || argval >= predicate->upper)
	    error ("argument %d of %qE must be in the range %d...%d",
		   argnum, fnname, predicate->lower, predicate->upper - 1);
	  else
	    error ("argument %d of %qE must be a multiple of %d",
		   argnum, fnname, predicate->align);
	  return;
	}

  /* Otherwise fall back to a generic type diagnostic.  */
  error ("incompatible type for argument %d of %qE", argnum, fnname);
}
6337 static rtx
6338 mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6339 rtx subtarget ATTRIBUTE_UNUSED,
6340 enum machine_mode mode ATTRIBUTE_UNUSED,
6341 int ignore ATTRIBUTE_UNUSED)
6343 rtx pat, op[10], arg[10];
6344 unsigned int a;
6345 int opindex, unsigned_p[10];
6346 tree fndecl, args;
6347 unsigned int n_args;
6348 tree fnname;
6349 const struct cgen_insn *cgen_insn;
6350 const struct insn_data *idata;
6351 int first_arg = 0;
6352 int return_type = void_type_node;
6353 int builtin_n_args;
6355 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6356 fnname = DECL_NAME (fndecl);
6358 /* Find out which instruction we should emit. Note that some coprocessor
6359 intrinsics may only be available in VLIW mode, or only in normal mode. */
6360 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6362 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
6363 return error_mark_node;
6365 idata = &insn_data[cgen_insn->icode];
6367 builtin_n_args = cgen_insn->num_args;
6369 if (cgen_insn->cret_p)
6371 if (cgen_insn->cret_p > 1)
6372 builtin_n_args ++;
6373 first_arg = 1;
6374 return_type = mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
6375 builtin_n_args --;
6378 /* Evaluate each argument. */
6379 n_args = call_expr_nargs (exp);
6381 if (n_args < builtin_n_args)
6383 error ("too few arguments to %qE", fnname);
6384 return error_mark_node;
6386 if (n_args > builtin_n_args)
6388 error ("too many arguments to %qE", fnname);
6389 return error_mark_node;
6392 for (a = first_arg; a < builtin_n_args+first_arg; a++)
6394 tree value;
6396 args = CALL_EXPR_ARG (exp, a-first_arg);
6398 value = args;
6400 #if 0
6401 if (cgen_insn->regnums[a].reference_p)
6403 if (TREE_CODE (value) != ADDR_EXPR)
6405 debug_tree(value);
6406 error ("argument %d of %qE must be an address", a+1, fnname);
6407 return error_mark_node;
6409 value = TREE_OPERAND (value, 0);
6411 #endif
6413 /* If the argument has been promoted to int, get the unpromoted
6414 value. This is necessary when sub-int memory values are bound
6415 to reference parameters. */
6416 if (TREE_CODE (value) == NOP_EXPR
6417 && TREE_TYPE (value) == integer_type_node
6418 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6419 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6420 < TYPE_PRECISION (TREE_TYPE (value))))
6421 value = TREE_OPERAND (value, 0);
6423 /* If the argument has been promoted to double, get the unpromoted
6424 SFmode value. This is necessary for FMAX support, for example. */
6425 if (TREE_CODE (value) == NOP_EXPR
6426 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6427 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6428 && TYPE_MODE (TREE_TYPE (value)) == DFmode
6429 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6430 value = TREE_OPERAND (value, 0);
6432 unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6433 arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
6434 arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6435 if (cgen_insn->regnums[a].reference_p)
6437 tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6438 enum machine_mode pointed_mode = TYPE_MODE (pointed_to);
6440 arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
6442 if (arg[a] == 0)
6444 error ("argument %d of %qE must be in the range %d...%d",
6445 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
6446 return error_mark_node;
6450 for (a=0; a<first_arg; a++)
6452 if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6453 arg[a] = target;
6454 else
6455 arg[a] = gen_reg_rtx (idata->operand[0].mode);
6458 /* Convert the arguments into a form suitable for the intrinsic.
6459 Report an error if this isn't possible. */
6460 for (opindex = 0; opindex < idata->n_operands; opindex++)
6462 a = cgen_insn->op_mapping[opindex];
6463 op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6464 arg[a], unsigned_p[a]);
6465 if (op[opindex] == 0)
6467 mep_incompatible_arg (&idata->operand[opindex],
6468 arg[a], a + 1 - first_arg, fnname);
6469 return error_mark_node;
6473 /* Emit the instruction. */
6474 pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6475 op[5], op[6], op[7], op[8], op[9]);
6477 if (GET_CODE (pat) == SET
6478 && GET_CODE (SET_DEST (pat)) == PC
6479 && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6480 emit_jump_insn (pat);
6481 else
6482 emit_insn (pat);
6484 /* Copy lvalues back to their final locations. */
6485 for (opindex = 0; opindex < idata->n_operands; opindex++)
6486 if (idata->operand[opindex].constraint[0] == '=')
6488 a = cgen_insn->op_mapping[opindex];
6489 if (a >= first_arg)
6491 if (GET_MODE_CLASS (GET_MODE (arg[a]))
6492 != GET_MODE_CLASS (GET_MODE (op[opindex])))
6493 emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6494 op[opindex]));
6495 else
6497 /* First convert the operand to the right mode, then copy it
6498 into the destination. Doing the conversion as a separate
6499 step (rather than using convert_move) means that we can
6500 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6501 refer to the same register. */
6502 op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6503 op[opindex], unsigned_p[a]);
6504 if (!rtx_equal_p (arg[a], op[opindex]))
6505 emit_move_insn (arg[a], op[opindex]);
6510 if (first_arg > 0 && target && target != op[0])
6512 emit_move_insn (target, op[0]);
6515 return target;
6518 static bool
6519 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
6521 return false;
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  Used as a for_each_rtx callback; DATA is
   ignored.  */

static int
global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  int regno;
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      /* For a subreg of a hard register, check the register the
	 subreg actually refers to.  */
      if (REG_P (SUBREG_REG (x)))
	{
	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
	      && global_regs[subreg_regno (x)])
	    return 1;
	  return 0;
	}
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
	return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case LABEL_REF:
      /* None of these can mention a register.  */
      return 0;

    case CALL:
      /* A non-constant call might use a global register.  */
      return 1;

    default:
      break;
    }

  return 0;
}
6574 /* Returns nonzero if X mentions a global register. */
6576 static int
6577 global_reg_mentioned_p (rtx x)
6579 if (INSN_P (x))
6581 if (CALL_P (x))
6583 if (! RTL_CONST_OR_PURE_CALL_P (x))
6584 return 1;
6585 x = CALL_INSN_FUNCTION_USAGE (x);
6586 if (x == 0)
6587 return 0;
6589 else
6590 x = PATTERN (x);
6593 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6595 /* Scheduling hooks for VLIW mode.
6597 Conceptually this is very simple: we have a two-pack architecture
6598 that takes one core insn and one coprocessor insn to make up either
6599 a 32- or 64-bit instruction word (depending on the option bit set in
6600 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6601 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6602 and one 48-bit cop insn or two 32-bit core/cop insns.
6604 In practice, instruction selection will be a bear. Consider in
6605 VL64 mode the following insns
6607 add $1, 1
6608 cmov $cr0, $0
6610 these cannot pack, since the add is a 16-bit core insn and cmov
6611 is a 32-bit cop insn. However,
6613 add3 $1, $1, 1
6614 cmov $cr0, $0
6616 packs just fine. For good VLIW code generation in VL64 mode, we
6617 will have to have 32-bit alternatives for many of the common core
6618 insns. Not implemented. */
/* Scheduler hook: adjust the COST of the dependence LINK between INSN
   and DEP_INSN.  Anti/output dependencies are treated as free, except
   that two intrinsics writing the same global hard register keep a
   cost of 1 so their order is preserved sensibly.  */

static int
mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  int cost_specified;

  if (REG_NOTE_KIND (link) != 0)
    {
      /* See whether INSN and DEP_INSN are intrinsics that set the same
	 hard register.  If so, it is more important to free up DEP_INSN
	 than it is to free up INSN.

	 Note that intrinsics like mep_mulr are handled differently from
	 the equivalent mep.md patterns.  In mep.md, if we don't care
	 about the value of $lo and $hi, the pattern will just clobber
	 the registers, not set them.  Since clobbers don't count as
	 output dependencies, it is often possible to reorder two mulrs,
	 even after reload.

	 In contrast, mep_mulr() sets both $lo and $hi to specific values,
	 so any pair of mep_mulr()s will be inter-dependent.  We should
	 therefore give the first mep_mulr() a higher priority.  */
      if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
	  && global_reg_mentioned_p (PATTERN (insn))
	  && global_reg_mentioned_p (PATTERN (dep_insn)))
	return 1;

      /* If the dependence is an anti or output dependence, assume it
	 has no cost.  */
      return 0;
    }

  /* If we can't recognize the insns, we can't really do anything.  */
  if (recog_memoized (dep_insn) < 0)
    return cost;

  /* The latency attribute doesn't apply to MeP-h1: we use the stall
     attribute instead.  */
  if (!TARGET_H1)
    {
      cost_specified = get_attr_latency (dep_insn);
      if (cost_specified != 0)
	return cost_specified;
    }

  return cost;
}
6667 /* ??? We don't properly compute the length of a load/store insn,
6668 taking into account the addressing mode. */
6670 static int
6671 mep_issue_rate (void)
6673 return TARGET_IVC2 ? 3 : 2;
6676 /* Return true if function DECL was declared with the vliw attribute. */
6678 bool
6679 mep_vliw_function_p (tree decl)
6681 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6684 static rtx
6685 mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6687 int i;
6689 for (i = nready - 1; i >= 0; --i)
6691 rtx insn = ready[i];
6692 if (recog_memoized (insn) >= 0
6693 && get_attr_slot (insn) == slot
6694 && get_attr_length (insn) == length)
6695 return insn;
6698 return NULL_RTX;
6701 static void
6702 mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6704 int i;
6706 for (i = 0; i < nready; ++i)
6707 if (ready[i] == insn)
6709 for (; i < nready - 1; ++i)
6710 ready[i] = ready[i + 1];
6711 ready[i] = insn;
6712 return;
6715 gcc_unreachable ();
/* Print a one-line summary of INSN (insn code, uid, pattern name and
   the VLIW slots it may occupy) to DUMP.  Used by mep_sched_reorder
   when the scheduler dump is verbose.  */

static void
mep_print_sched_insn (FILE *dump, rtx insn)
{
  const char *slots = "none";
  const char *name = NULL;
  int code;
  char buf[30];

  if (GET_CODE (PATTERN (insn)) == SET
      || GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      switch (get_attr_slots (insn))
	{
	case SLOTS_CORE: slots = "core"; break;
	case SLOTS_C3: slots = "c3"; break;
	case SLOTS_P0: slots = "p0"; break;
	case SLOTS_P0_P0S: slots = "p0,p0s"; break;
	case SLOTS_P0_P1: slots = "p0,p1"; break;
	case SLOTS_P0S: slots = "p0s"; break;
	case SLOTS_P0S_P1: slots = "p0s,p1"; break;
	case SLOTS_P1: slots = "p1"; break;
	default:
	  /* Unnamed slot combination: show the raw attribute value.  */
	  sprintf(buf, "%d", get_attr_slots (insn));
	  slots = buf;
	  break;
	}
    }
  if (GET_CODE (PATTERN (insn)) == USE)
    slots = "use";

  code = INSN_CODE (insn);
  if (code >= 0)
    name = get_insn_name (code);
  if (!name)
    name = "{unknown}";

  fprintf (dump,
	   "insn %4d %4d %8s %s\n",
	   code,
	   INSN_UID (insn),
	   name,
	   slots);
}
/* Scheduler reorder hook.  In VLIW mode, try to pull a packable
   core/coprocessor insn pair to the front of the ready list so they
   can be issued together; return the number of insns to issue this
   cycle (1 or 2, or NREADY for IVC2 where the DFA decides).  */

static int
mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
		   int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
		   int *pnready, int clock ATTRIBUTE_UNUSED)
{
  int nready = *pnready;
  rtx core_insn, cop_insn;
  int i;

  if (dump && sched_verbose > 1)
    {
      fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
      for (i=0; i<nready; i++)
	mep_print_sched_insn (dump, ready[i]);
      fprintf (dump, "\n");
    }

  /* Pairing only applies to VLIW functions with at least two
     candidates.  */
  if (!mep_vliw_function_p (cfun->decl))
    return 1;
  if (nready < 2)
    return 1;

  /* IVC2 uses a DFA to determine what's ready and what's not. */
  if (TARGET_IVC2)
    return nready;

  /* We can issue either a core or coprocessor instruction.
     Look for a matched pair of insns to reorder.  If we don't
     find any, don't second-guess the scheduler's priorities.  */

  if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
      && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
					  TARGET_OPT_VL64 ? 6 : 2)))
    ;
  else if (TARGET_OPT_VL64
	   && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
	   && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
    ;
  else
    /* We didn't find a pair.  Issue the single insn at the head
       of the ready list.  */
    return 1;

  /* Reorder the two insns first.  */
  mep_move_ready_insn (ready, nready, core_insn);
  mep_move_ready_insn (ready, nready - 1, cop_insn);
  return 2;
}
6811 /* A for_each_rtx callback. Return true if *X is a register that is
6812 set by insn PREV. */
6814 static int
6815 mep_store_find_set (rtx *x, void *prev)
6817 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6820 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6821 not the containing insn. */
6823 static bool
6824 mep_store_data_bypass_1 (rtx prev, rtx pat)
6826 /* Cope with intrinsics like swcpa. */
6827 if (GET_CODE (pat) == PARALLEL)
6829 int i;
6831 for (i = 0; i < XVECLEN (pat, 0); i++)
6832 if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6833 return true;
6835 return false;
6838 /* Check for some sort of store. */
6839 if (GET_CODE (pat) != SET
6840 || GET_CODE (SET_DEST (pat)) != MEM)
6841 return false;
6843 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6844 The first operand to the unspec is the store data and the other operands
6845 are used to calculate the address. */
6846 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6848 rtx src;
6849 int i;
6851 src = SET_SRC (pat);
6852 for (i = 1; i < XVECLEN (src, 0); i++)
6853 if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6854 return false;
6856 return true;
6859 /* Otherwise just check that PREV doesn't modify any register mentioned
6860 in the memory destination. */
6861 return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6864 /* Return true if INSN is a store instruction and if the store address
6865 has no true dependence on PREV. */
6867 bool
6868 mep_store_data_bypass_p (rtx prev, rtx insn)
6870 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6873 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6874 is a register other than LO or HI and if PREV sets *X. */
6876 static int
6877 mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6879 return (REG_P (*x)
6880 && REGNO (*x) != LO_REGNO
6881 && REGNO (*x) != HI_REGNO
6882 && reg_set_p (*x, (const_rtx) prev));
6885 /* Return true if, apart from HI/LO, there are no true dependencies
6886 between multiplication instructions PREV and INSN. */
6888 bool
6889 mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6891 rtx pat;
6893 pat = PATTERN (insn);
6894 if (GET_CODE (pat) == PARALLEL)
6895 pat = XVECEXP (pat, 0, 0);
6896 return (GET_CODE (pat) == SET
6897 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
/* Return true if INSN is an ldc instruction that issues to the
   MeP-h1 integer pipeline.  This is true for instructions that
   read from PSW, LP, SAR, HI and LO.  */

bool
mep_ipipe_ldc_p (rtx insn)
{
  rtx pat, src;

  pat = PATTERN (insn);

  /* Cope with intrinsics that set both a hard register and its shadow.
     The set of the hard register comes first.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);

  if (GET_CODE (pat) == SET)
    {
      src = SET_SRC (pat);

      /* Cope with intrinsics.  The first operand to the unspec is
	 the source register.  */
      if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
	src = XVECEXP (src, 0, 0);

      if (REG_P (src))
	switch (REGNO (src))
	  {
	  /* Reads of these control registers go down the integer
	     pipeline; anything else falls through to false.  */
	  case PSW_REGNO:
	  case LP_REGNO:
	  case SAR_REGNO:
	  case HI_REGNO:
	  case LO_REGNO:
	    return true;
	  }
    }
  return false;
}
/* Create a VLIW bundle from core instruction CORE and coprocessor
   instruction COP.  COP always satisfies INSN_P, but CORE can be
   either a new pattern or an existing instruction.

   Emit the bundle in place of COP and return it.  */

static rtx
mep_make_bundle (rtx core, rtx cop)
{
  rtx insn;

  /* If CORE is an existing instruction, remove it, otherwise put
     the new pattern in an INSN harness.  */
  if (INSN_P (core))
    remove_insn (core);
  else
    core = make_insn_raw (core);

  /* Generate the bundle sequence and replace COP with it.  */
  insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
  insn = emit_insn_after (insn, cop);
  remove_insn (cop);

  /* Set up the links of the insns inside the SEQUENCE.  */
  PREV_INSN (core) = PREV_INSN (insn);
  NEXT_INSN (core) = cop;
  PREV_INSN (cop) = core;
  NEXT_INSN (cop) = NEXT_INSN (insn);

  /* Set the VLIW flag for the coprocessor instruction.  */
  PUT_MODE (core, VOIDmode);
  PUT_MODE (cop, BImode);

  /* Derive a location for the bundle.  Individual instructions cannot
     have their own location because there can be no assembler labels
     between CORE and COP.  Prefer CORE's locator when it has one.  */
  INSN_LOCATOR (insn) = INSN_LOCATOR (INSN_LOCATOR (core) ? core : cop);
  INSN_LOCATOR (core) = 0;
  INSN_LOCATOR (cop) = 0;

  return insn;
}
6982 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6984 static void
6985 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6987 rtx * pinsn = (rtx *) data;
6989 if (*pinsn && reg_mentioned_p (x, *pinsn))
6990 *pinsn = NULL_RTX;
6993 /* Return true if anything in insn X is (anti,output,true) dependent on
6994 anything in insn Y. */
6996 static int
6997 mep_insn_dependent_p (rtx x, rtx y)
6999 rtx tmp;
7001 gcc_assert (INSN_P (x));
7002 gcc_assert (INSN_P (y));
7004 tmp = PATTERN (y);
7005 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
7006 if (tmp == NULL_RTX)
7007 return 1;
7009 tmp = PATTERN (x);
7010 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
7011 if (tmp == NULL_RTX)
7012 return 1;
7014 return 0;
7017 static int
7018 core_insn_p (rtx insn)
7020 if (GET_CODE (PATTERN (insn)) == USE)
7021 return 0;
7022 if (get_attr_slot (insn) == SLOT_CORE)
7023 return 1;
7024 return 0;
/* Mark coprocessor instructions that can be bundled together with
   the immediately preceding core instruction.  This is later used
   to emit the "+" that tells the assembler to create a VLIW insn.

   For unbundled insns, the assembler will automatically add coprocessor
   nops, and 16-bit core nops.  Due to an apparent oversight in the
   spec, the assembler will _not_ automatically add 32-bit core nops,
   so we have to emit those here.

   Called from mep_insn_reorg.  */

static void
mep_bundle_insns (rtx insns)
{
  rtx insn, last = NULL_RTX, first = NULL_RTX;
  int saw_scheduling = 0;

  /* Only do bundling if we're in vliw mode.  */
  if (!mep_vliw_function_p (cfun->decl))
    return;

  /* The first insn in a bundle are TImode, the remainder are
     VOIDmode.  After this function, the first has VOIDmode and the
     rest have BImode.  */

  /* Note: this doesn't appear to be true for JUMP_INSNs.  */

  /* First, move any NOTEs that are within a bundle, to the beginning
     of the bundle.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn) && first)
	/* Don't clear FIRST.  */;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
	first = insn;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
	{
	  rtx note, prev;

	  /* INSN is part of a bundle; FIRST is the first insn in that
	     bundle.  Move all intervening notes out of the bundle.
	     In addition, since the debug pass may insert a label
	     whenever the current line changes, set the location info
	     for INSN to match FIRST.  */

	  INSN_LOCATOR (insn) = INSN_LOCATOR (first);

	  note = PREV_INSN (insn);
	  while (note && note != first)
	    {
	      prev = PREV_INSN (note);

	      if (NOTE_P (note))
		{
		  /* Remove NOTE from here... */
		  PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
		  NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
		  /* ...and put it in here.  */
		  NEXT_INSN (note) = first;
		  PREV_INSN (note) = PREV_INSN (first);
		  NEXT_INSN (PREV_INSN (note)) = note;
		  PREV_INSN (NEXT_INSN (note)) = note;
		}

	      note = prev;
	    }
	}

      else if (!NONJUMP_INSN_P (insn))
	first = 0;
    }

  /* Now fix up the bundles.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	continue;

      if (!NONJUMP_INSN_P (insn))
	{
	  last = 0;
	  continue;
	}

      /* If we're not optimizing enough, there won't be scheduling
	 info.  We detect that here.  */
      if (GET_MODE (insn) == TImode)
	saw_scheduling = 1;
      if (!saw_scheduling)
	continue;

      if (TARGET_IVC2)
	{
	  rtx core_insn = NULL_RTX;

	  /* IVC2 slots are scheduled by DFA, so we just accept
	     whatever the scheduler gives us.  However, we must make
	     sure the core insn (if any) is the first in the bundle.
	     The IVC2 assembler can insert whatever NOPs are needed,
	     and allows a COP insn to be first.  */

	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_MODE (insn) == TImode)
	    {
	      /* Find the last core insn in this bundle, scanning the
		 VOIDmode insns that follow the TImode bundle head.  */
	      for (last = insn;
		   NEXT_INSN (last)
		     && GET_MODE (NEXT_INSN (last)) == VOIDmode
		     && NONJUMP_INSN_P (NEXT_INSN (last));
		   last = NEXT_INSN (last))
		{
		  if (core_insn_p (last))
		    core_insn = last;
		}
	      if (core_insn_p (last))
		core_insn = last;

	      if (core_insn && core_insn != insn)
		{
		  /* Swap core insn to first in the bundle.  */

		  /* Remove core insn.  */
		  if (PREV_INSN (core_insn))
		    NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
		  if (NEXT_INSN (core_insn))
		    PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);

		  /* Re-insert core insn.  */
		  PREV_INSN (core_insn) = PREV_INSN (insn);
		  NEXT_INSN (core_insn) = insn;

		  if (PREV_INSN (core_insn))
		    NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
		  PREV_INSN (insn) = core_insn;

		  PUT_MODE (core_insn, TImode);
		  PUT_MODE (insn, VOIDmode);
		}
	    }

	  /* The first insn has TImode, the rest have VOIDmode */
	  if (GET_MODE (insn) == TImode)
	    PUT_MODE (insn, VOIDmode);
	  else
	    PUT_MODE (insn, BImode);
	  continue;
	}

      PUT_MODE (insn, VOIDmode);
      if (recog_memoized (insn) >= 0
	  && get_attr_slot (insn) == SLOT_COP)
	{
	  /* A COP insn that cannot pair with the preceding core insn
	     gets a NOP partner of the complementary size; otherwise
	     bundle it with LAST.  */
	  if (GET_CODE (insn) == JUMP_INSN
	      || ! last
	      || recog_memoized (last) < 0
	      || get_attr_slot (last) != SLOT_CORE
	      || (get_attr_length (insn)
		  != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
	      || mep_insn_dependent_p (insn, last))
	    {
	      switch (get_attr_length (insn))
		{
		case 8:
		  break;
		case 6:
		  insn = mep_make_bundle (gen_nop (), insn);
		  break;
		case 4:
		  if (TARGET_OPT_VL64)
		    insn = mep_make_bundle (gen_nop32 (), insn);
		  break;
		case 2:
		  if (TARGET_OPT_VL64)
		    error ("2 byte cop instructions are"
			   " not allowed in 64-bit VLIW mode");
		  else
		    insn = mep_make_bundle (gen_nop (), insn);
		  break;
		default:
		  error ("unexpected %d byte cop instruction",
			 get_attr_length (insn));
		  break;
		}
	    }
	  else
	    insn = mep_make_bundle (last, insn);
	}

      last = insn;
    }
}
7222 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7223 Return true on success. This function can fail if the intrinsic
7224 is unavailable or if the operands don't satisfy their predicates. */
7226 bool
7227 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7229 const struct cgen_insn *cgen_insn;
7230 const struct insn_data *idata;
7231 rtx newop[10];
7232 int i;
7234 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7235 return false;
7237 idata = &insn_data[cgen_insn->icode];
7238 for (i = 0; i < idata->n_operands; i++)
7240 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7241 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7242 return false;
7245 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7246 newop[3], newop[4], newop[5],
7247 newop[6], newop[7], newop[8]));
7249 return true;
/* Apply the given unary intrinsic to OPERANDS[1] and store it on
   OPERANDS[0].  Report an error if the instruction could not
   be synthesized.  OPERANDS[1] is a register_operand.  For sign
   and zero extensions, it may be smaller than SImode.  */

bool
mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
			    rtx * operands ATTRIBUTE_UNUSED)
{
  /* Deliberate stub: no unary intrinsic expansions are implemented
     for MeP, so tell the caller to fall back on its generic path.  */
  return false;
}
/* Likewise, but apply a binary operation to OPERANDS[1] and
   OPERANDS[2].  OPERANDS[1] is a register_operand, OPERANDS[2]
   can be a general_operand.

   IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
   third operand.  REG and REG3 take register operands only.  */

bool
mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
			     int ATTRIBUTE_UNUSED immediate3,
			     int ATTRIBUTE_UNUSED reg,
			     int ATTRIBUTE_UNUSED reg3,
			     rtx * operands ATTRIBUTE_UNUSED)
{
  /* Deliberate stub: no binary intrinsic expansions are implemented
     for MeP, so the caller always uses its generic expansion.  */
  return false;
}
7283 static bool
7284 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total, bool ATTRIBUTE_UNUSED speed_t)
7286 switch (code)
7288 case CONST_INT:
7289 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7290 *total = 0;
7291 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7292 *total = 1;
7293 else
7294 *total = 3;
7295 return true;
7297 case SYMBOL_REF:
7298 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7299 return true;
7301 case MULT:
7302 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7303 ? COSTS_N_INSNS (3)
7304 : COSTS_N_INSNS (2));
7305 return true;
7307 return false;
/* Implement TARGET_ADDRESS_COST.  All addressing modes are treated
   as equally cheap, regardless of ADDR or whether we are optimizing
   for speed.  */

static int
mep_address_cost (rtx addr ATTRIBUTE_UNUSED, bool ATTRIBUTE_UNUSED speed_p)
{
  return 1;
}
7316 static bool
7317 mep_handle_option (size_t code,
7318 const char *arg ATTRIBUTE_UNUSED,
7319 int value ATTRIBUTE_UNUSED)
7321 int i;
7323 switch (code)
7325 case OPT_mall_opts:
7326 target_flags |= MEP_ALL_OPTS;
7327 break;
7329 case OPT_mno_opts:
7330 target_flags &= ~ MEP_ALL_OPTS;
7331 break;
7333 case OPT_mcop64:
7334 target_flags |= MASK_COP;
7335 target_flags |= MASK_64BIT_CR_REGS;
7336 break;
7338 case OPT_mtiny_:
7339 option_mtiny_specified = 1;
7341 case OPT_mivc2:
7342 target_flags |= MASK_COP;
7343 target_flags |= MASK_64BIT_CR_REGS;
7344 target_flags |= MASK_VLIW;
7345 target_flags |= MASK_OPT_VL64;
7346 target_flags |= MASK_IVC2;
7348 for (i=0; i<32; i++)
7349 fixed_regs[i+48] = 0;
7350 for (i=0; i<32; i++)
7351 call_used_regs[i+48] = 1;
7352 for (i=6; i<8; i++)
7353 call_used_regs[i+48] = 0;
7355 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
7356 RN (0, "$csar0");
7357 RN (1, "$cc");
7358 RN (4, "$cofr0");
7359 RN (5, "$cofr1");
7360 RN (6, "$cofa0");
7361 RN (7, "$cofa1");
7362 RN (15, "$csar1");
7364 RN (16, "$acc0_0");
7365 RN (17, "$acc0_1");
7366 RN (18, "$acc0_2");
7367 RN (19, "$acc0_3");
7368 RN (20, "$acc0_4");
7369 RN (21, "$acc0_5");
7370 RN (22, "$acc0_6");
7371 RN (23, "$acc0_7");
7373 RN (24, "$acc1_0");
7374 RN (25, "$acc1_1");
7375 RN (26, "$acc1_2");
7376 RN (27, "$acc1_3");
7377 RN (28, "$acc1_4");
7378 RN (29, "$acc1_5");
7379 RN (30, "$acc1_6");
7380 RN (31, "$acc1_7");
7381 #undef RN
7383 break;
7385 default:
7386 break;
7388 return TRUE;
/* Implement TARGET_ASM_INIT_SECTIONS.  Create the MeP-specific output
   sections used for based/tiny/far data and for VLIW vs. core code.  */

static void
mep_asm_init_sections (void)
{
  /* Writable "based" data — presumably addressed via the base
     register addressing mode; confirm against the MeP ABI.  */
  based_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .based,\"aw\"");

  /* Zero-initialized small ("tiny") data.  */
  tinybss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .sbss,\"aw\"");

  /* Initialized small data.  */
  sdata_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .sdata,\"aw\",@progbits");

  /* Initialized "far" data.  */
  far_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .far,\"aw\"");

  /* Zero-initialized far data.  */
  farbss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .farbss,\"aw\"");

  /* Read-only far data.  */
  frodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .frodata,\"a\"");

  /* Read-only small data.  */
  srodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .srodata,\"a\"");

  /* VLIW code; the trailing .vliw directive switches the assembler
     into VLIW mode for this section.  */
  vtext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vtext,\"axv\"\n\t.vliw");

  /* Far VLIW code.  */
  vftext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vftext,\"axv\"\n\t.vliw");

  /* Far core (non-VLIW) code; .core switches the assembler back to
     core mode.  */
  ftext_section
    = get_unnamed_section (SECTION_CODE, output_section_asm_op,
			   "\t.section .ftext,\"ax\"\n\t.core");
}
7436 #include "gt-mep.h"