mep.c (mep_vliw_jmp_match): New function.
[official-gcc.git] / gcc / config / mep / mep.c
blob80d201840552728a199ccb168d731abe3a9b91a1
1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "recog.h"
38 #include "obstack.h"
39 #include "tree.h"
40 #include "expr.h"
41 #include "except.h"
42 #include "function.h"
43 #include "optabs.h"
44 #include "reload.h"
45 #include "tm_p.h"
46 #include "ggc.h"
47 #include "toplev.h"
48 #include "integrate.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
52 #include "df.h"
54 /* Structure of this file:
56 + Command Line Option Support
57 + Pattern support - constraints, predicates, expanders
58 + Reload Support
59 + Costs
60 + Functions to save and restore machine-specific function data.
61 + Frame/Epilog/Prolog Related
62 + Operand Printing
63 + Function args in registers
64 + Handle pipeline hazards
65 + Handle attributes
66 + Trampolines
67 + Machine-dependent Reorg
68 + Builtins. */
70 /* Symbol encodings:
72 Symbols are encoded as @ <char> . <name> where <char> is one of these:
74 b - based
75 t - tiny
76 n - near
77 f - far
78 i - io, near
79 I - io, far
80 c - cb (control bus) */
/* Per-function machine-specific state, created lazily by
   mep_init_machine_status and reached through cfun->machine.
   GTY(()) so the garbage collector scans the rtx field.  */
struct GTY(()) machine_function
  /* Nonzero when this function was determined to need a frame pointer.  */
  int mep_frame_pointer_needed;

  /* For varargs. */
  int arg_regs_to_save;
  int regsave_filler;
  int frame_filler;

  /* Records __builtin_return address. */
  rtx eh_stack_adjust;

  /* Bookkeeping for saved registers: total save-area size, the slot
     assigned to each hard register, and a saved/not-saved flag.  */
  int reg_save_size;
  int reg_save_slot[FIRST_PSEUDO_REGISTER];
  unsigned char reg_saved[FIRST_PSEUDO_REGISTER];

  /* 2 if the current function has an interrupt attribute, 1 if not, 0
     if unknown. This is here because resource.c uses EPILOGUE_USES
     which needs it. */
  int interrupt_handler;

  /* Likewise, for disinterrupt attribute. */
  int disable_interrupts;

  /* Number of doloop tags used so far. */
  int doloop_tags;

  /* True if the last tag was allocated to a doloop_end. */
  bool doloop_tag_from_end;

  /* True if reload changes $TP. */
  bool reload_changes_tp;

  /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
     We only set this if the function is an interrupt handler. */
  int asms_without_operands;
120 #define MEP_CONTROL_REG(x) \
121 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
123 static const struct attribute_spec mep_attribute_table[11];
125 static GTY(()) section * based_section;
126 static GTY(()) section * tinybss_section;
127 static GTY(()) section * far_section;
128 static GTY(()) section * farbss_section;
129 static GTY(()) section * frodata_section;
130 static GTY(()) section * srodata_section;
132 static void mep_set_leaf_registers (int);
133 static bool symbol_p (rtx);
134 static bool symbolref_p (rtx);
135 static void encode_pattern_1 (rtx);
136 static void encode_pattern (rtx);
137 static bool const_in_range (rtx, int, int);
138 static void mep_rewrite_mult (rtx, rtx);
139 static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
140 static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
141 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
142 static bool move_needs_splitting (rtx, rtx, enum machine_mode);
143 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
144 static bool mep_nongeneral_reg (rtx);
145 static bool mep_general_copro_reg (rtx);
146 static bool mep_nonregister (rtx);
147 static struct machine_function* mep_init_machine_status (void);
148 static rtx mep_tp_rtx (void);
149 static rtx mep_gp_rtx (void);
150 static bool mep_interrupt_p (void);
151 static bool mep_disinterrupt_p (void);
152 static bool mep_reg_set_p (rtx, rtx);
153 static bool mep_reg_set_in_function (int);
154 static bool mep_interrupt_saved_reg (int);
155 static bool mep_call_saves_register (int);
156 static rtx F (rtx);
157 static void add_constant (int, int, int, int);
158 static bool mep_function_uses_sp (void);
159 static rtx maybe_dead_move (rtx, rtx, bool);
160 static void mep_reload_pointer (int, const char *);
161 static void mep_start_function (FILE *, HOST_WIDE_INT);
162 static bool mep_function_ok_for_sibcall (tree, tree);
163 static int unique_bit_in (HOST_WIDE_INT);
164 static int bit_size_for_clip (HOST_WIDE_INT);
165 static int bytesize (const_tree, enum machine_mode);
166 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
167 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
168 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
169 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
170 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
171 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
172 static bool mep_function_attribute_inlinable_p (const_tree);
173 static bool mep_can_inline_p (tree, tree);
174 static bool mep_lookup_pragma_disinterrupt (const char *);
175 static int mep_multiple_address_regions (tree, bool);
176 static int mep_attrlist_to_encoding (tree, tree);
177 static void mep_insert_attributes (tree, tree *);
178 static void mep_encode_section_info (tree, rtx, int);
179 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
180 static void mep_unique_section (tree, int);
181 static unsigned int mep_section_type_flags (tree, const char *, int);
182 static void mep_asm_named_section (const char *, unsigned int, tree);
183 static bool mep_mentioned_p (rtx, rtx, int);
184 static void mep_reorg_regmove (rtx);
185 static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
186 static void mep_reorg_repeat (rtx);
187 static bool mep_invertable_branch_p (rtx);
188 static void mep_invert_branch (rtx, rtx);
189 static void mep_reorg_erepeat (rtx);
190 static void mep_jmp_return_reorg (rtx);
191 static void mep_reorg_addcombine (rtx);
192 static void mep_reorg (void);
193 static void mep_init_intrinsics (void);
194 static void mep_init_builtins (void);
195 static void mep_intrinsic_unavailable (int);
196 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
197 static bool mep_get_move_insn (int, const struct cgen_insn **);
198 static rtx mep_convert_arg (enum machine_mode, rtx);
199 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
200 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
201 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
202 static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
203 static int mep_adjust_cost (rtx, rtx, rtx, int);
204 static int mep_issue_rate (void);
205 static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
206 static void mep_move_ready_insn (rtx *, int, rtx);
207 static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
208 static rtx mep_make_bundle (rtx, rtx);
209 static void mep_bundle_insns (rtx);
210 static bool mep_rtx_cost (rtx, int, int, int *, bool);
211 static int mep_address_cost (rtx, bool);
212 static void mep_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
213 tree, int *, int);
214 static bool mep_pass_by_reference (CUMULATIVE_ARGS * cum, enum machine_mode,
215 const_tree, bool);
216 static bool mep_vector_mode_supported_p (enum machine_mode);
217 static bool mep_handle_option (size_t, const char *, int);
218 static rtx mep_allocate_initial_value (rtx);
219 static void mep_asm_init_sections (void);
220 static int mep_comp_type_attributes (const_tree, const_tree);
221 static bool mep_narrow_volatile_bitfield (void);
222 static rtx mep_expand_builtin_saveregs (void);
223 static tree mep_build_builtin_va_list (void);
224 static void mep_expand_va_start (tree, rtx);
225 static tree mep_gimplify_va_arg_expr (tree, tree, tree *, tree *);
227 /* Initialize the GCC target structure. */
229 #undef TARGET_ASM_FUNCTION_PROLOGUE
230 #define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
231 #undef TARGET_ATTRIBUTE_TABLE
232 #define TARGET_ATTRIBUTE_TABLE mep_attribute_table
233 #undef TARGET_COMP_TYPE_ATTRIBUTES
234 #define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
235 #undef TARGET_INSERT_ATTRIBUTES
236 #define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
237 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
238 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
239 #undef TARGET_CAN_INLINE_P
240 #define TARGET_CAN_INLINE_P mep_can_inline_p
241 #undef TARGET_SECTION_TYPE_FLAGS
242 #define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
243 #undef TARGET_ASM_NAMED_SECTION
244 #define TARGET_ASM_NAMED_SECTION mep_asm_named_section
245 #undef TARGET_INIT_BUILTINS
246 #define TARGET_INIT_BUILTINS mep_init_builtins
247 #undef TARGET_EXPAND_BUILTIN
248 #define TARGET_EXPAND_BUILTIN mep_expand_builtin
249 #undef TARGET_SCHED_ADJUST_COST
250 #define TARGET_SCHED_ADJUST_COST mep_adjust_cost
251 #undef TARGET_SCHED_ISSUE_RATE
252 #define TARGET_SCHED_ISSUE_RATE mep_issue_rate
253 #undef TARGET_SCHED_REORDER
254 #define TARGET_SCHED_REORDER mep_sched_reorder
255 #undef TARGET_STRIP_NAME_ENCODING
256 #define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
257 #undef TARGET_ASM_SELECT_SECTION
258 #define TARGET_ASM_SELECT_SECTION mep_select_section
259 #undef TARGET_ASM_UNIQUE_SECTION
260 #define TARGET_ASM_UNIQUE_SECTION mep_unique_section
261 #undef TARGET_ENCODE_SECTION_INFO
262 #define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
263 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
264 #define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
265 #undef TARGET_RTX_COSTS
266 #define TARGET_RTX_COSTS mep_rtx_cost
267 #undef TARGET_ADDRESS_COST
268 #define TARGET_ADDRESS_COST mep_address_cost
269 #undef TARGET_MACHINE_DEPENDENT_REORG
270 #define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
271 #undef TARGET_SETUP_INCOMING_VARARGS
272 #define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
273 #undef TARGET_PASS_BY_REFERENCE
274 #define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
275 #undef TARGET_VECTOR_MODE_SUPPORTED_P
276 #define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
277 #undef TARGET_HANDLE_OPTION
278 #define TARGET_HANDLE_OPTION mep_handle_option
279 #undef TARGET_DEFAULT_TARGET_FLAGS
280 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
281 #undef TARGET_ALLOCATE_INITIAL_VALUE
282 #define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
283 #undef TARGET_ASM_INIT_SECTIONS
284 #define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
285 #undef TARGET_RETURN_IN_MEMORY
286 #define TARGET_RETURN_IN_MEMORY mep_return_in_memory
287 #undef TARGET_NARROW_VOLATILE_BITFIELD
288 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
289 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
290 #define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
291 #undef TARGET_BUILD_BUILTIN_VA_LIST
292 #define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
293 #undef TARGET_EXPAND_BUILTIN_VA_START
294 #define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
295 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
296 #define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
298 struct gcc_target targetm = TARGET_INITIALIZER;
300 #define WANT_GCC_DEFINITIONS
301 #include "mep-intrin.h"
302 #undef WANT_GCC_DEFINITIONS
305 /* Command Line Option Support. */
307 char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
309 /* True if we can use cmov instructions to move values back and forth
310 between core and coprocessor registers. */
311 bool mep_have_core_copro_moves_p;
313 /* True if we can use cmov instructions (or a work-alike) to move
314 values between coprocessor registers. */
315 bool mep_have_copro_copro_moves_p;
/* A table of all coprocessor instructions that can act like
   a coprocessor-to-coprocessor cmov.  Entries are insn codes from
   the generated mep-intrin.h tables.  */
static const int mep_cmov_insns[] = {
  mep_cmov,
  mep_cpmov,
  mep_fmovs,
  mep_caddi3,
  mep_csubi3,
  mep_candi3,
  mep_cori3,
  mep_cxori3,
  mep_cand3,
  mep_cor3

/* Nonzero when -mtiny= was given explicitly on the command line,
   used to diagnose conflicting -ms/-mm combinations.  */
static int option_mtiny_specified = 0;
335 static void
336 mep_set_leaf_registers (int enable)
338 int i;
340 if (mep_leaf_registers[0] != enable)
341 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
342 mep_leaf_registers[i] = enable;
345 void
346 mep_conditional_register_usage (char *fixed_regs, char *call_used_regs)
348 int i;
350 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
352 fixed_regs[HI_REGNO] = 1;
353 fixed_regs[LO_REGNO] = 1;
354 call_used_regs[HI_REGNO] = 1;
355 call_used_regs[LO_REGNO] = 1;
358 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
359 global_regs[i] = 1;
362 void
363 mep_optimization_options (void)
365 /* The first scheduling pass often increases register pressure and tends
366 to result in more spill code. Only run it when specifically asked. */
367 flag_schedule_insns = 0;
369 /* Using $fp doesn't gain us much, even when debugging is important. */
370 flag_omit_frame_pointer = 1;
/* Implement OVERRIDE_OPTIONS: diagnose unsupported or conflicting
   command-line option combinations and derive dependent settings
   (tiny-section cutoff, machine-status hook, intrinsic tables).  */
void
mep_override_options (void)
  /* PIC code generation is not implemented for this port.  */
  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  /* -ms, -mm and -ml are mutually exclusive configurations.  */
  if (TARGET_S && TARGET_M)
    error ("only one of -ms and -mm may be given");
  if (TARGET_S && TARGET_L)
    error ("only one of -ms and -ml may be given");
  if (TARGET_M && TARGET_L)
    error ("only one of -mm and -ml may be given");
  /* -ms and -mm imply their own tiny cutoffs (set below), so an
     explicit -mtiny= conflicts with them.  */
  if (TARGET_S && option_mtiny_specified)
    error ("only one of -ms and -mtiny= may be given");
  if (TARGET_M && option_mtiny_specified)
    error ("only one of -mm and -mtiny= may be given");
  if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
    warning (0, "-mclip currently has no effect without -mminmax");

  /* -mc= only accepts the three known constant-section names.  */
  if (mep_const_section)
      if (strcmp (mep_const_section, "tiny") != 0
	  && strcmp (mep_const_section, "near") != 0
	  && strcmp (mep_const_section, "far") != 0)
	error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");

  /* Derive the tiny-section size cutoff from the memory model.  */
  if (TARGET_S)
    mep_tiny_cutoff = 65536;
  if (TARGET_M)
    mep_tiny_cutoff = 0;
  if (TARGET_L && ! option_mtiny_specified)
    mep_tiny_cutoff = 0;

  if (TARGET_64BIT_CR_REGS)
    flag_split_wide_types = 0;

  init_machine_status = mep_init_machine_status;
  mep_init_intrinsics ();
415 /* Pattern Support - constraints, predicates, expanders. */
417 /* MEP has very few instructions that can refer to the span of
418 addresses used by symbols, so it's common to check for them. */
420 static bool
421 symbol_p (rtx x)
423 int c = GET_CODE (x);
425 return (c == CONST_INT
426 || c == CONST
427 || c == SYMBOL_REF);
430 static bool
431 symbolref_p (rtx x)
433 int c;
435 if (GET_CODE (x) != MEM)
436 return false;
438 c = GET_CODE (XEXP (x, 0));
439 return (c == CONST_INT
440 || c == CONST
441 || c == SYMBOL_REF);
444 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
446 #define GEN_REG(R, STRICT) \
447 (GR_REGNO_P (R) \
448 || (!STRICT \
449 && ((R) == ARG_POINTER_REGNUM \
450 || (R) >= FIRST_PSEUDO_REGISTER)))
452 static char pattern[12], *patternp;
453 static GTY(()) rtx patternr[12];
454 #define RTX_IS(x) (strcmp (pattern, x) == 0)
/* Append a one-character encoding of X (recursing into operands) to
   the global `pattern' buffer, and record each rtx visited in
   `patternr' at the matching offset.  The finished string is matched
   against templates with RTX_IS.  */
static void
encode_pattern_1 (rtx x)
  int i;

  /* Leave room for the terminating NUL; overlong patterns are cut
     short with a '?'.  */
  if (patternp == pattern + sizeof (pattern) - 2)
      patternp[-1] = '?';
      return;

  patternr[patternp-pattern] = x;

  switch (GET_CODE (x))
    case REG:
      *patternp++ = 'r';
      break;
    case MEM:
      *patternp++ = 'm';
      /* FALLTHRU - a MEM's address is encoded just like a CONST's
	 operand.  */
    case CONST:
      encode_pattern_1 (XEXP(x, 0));
      break;
    case PLUS:
      *patternp++ = '+';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case LO_SUM:
      *patternp++ = 'L';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case HIGH:
      *patternp++ = 'H';
      encode_pattern_1 (XEXP(x, 0));
      break;
    case SYMBOL_REF:
      *patternp++ = 's';
      break;
    case LABEL_REF:
      *patternp++ = 'l';
      break;
    case CONST_INT:
    case CONST_DOUBLE:
      *patternp++ = 'i';
      break;
    case UNSPEC:
      /* Unspecs encode as 'u' plus their unspec number as a digit,
	 followed by the encodings of their operands.  */
      *patternp++ = 'u';
      *patternp++ = '0' + XCINT(x, 1, UNSPEC);
      for (i=0; i<XVECLEN (x, 0); i++)
	encode_pattern_1 (XVECEXP (x, 0, i));
      break;
    case USE:
      *patternp++ = 'U';
      break;
    default:
      /* Anything unrecognized encodes as '?', which matches nothing.  */
      *patternp++ = '?';
#if 0
      fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
      debug_rtx (x);
      gcc_unreachable ();
#endif
      break;
523 static void
524 encode_pattern (rtx x)
526 patternp = pattern;
527 encode_pattern_1 (x);
528 *patternp = 0;
/* Return the section-encoding tag of symbol X ('b', 't', 'n', 'f',
   'c' - see the table at the top of this file), looking through MEM,
   CONST, UNSPEC, and PLUS-with-constant-offset wrappers.  The io tags
   are folded onto the near/far tags: 'i' (io, near) yields 'n' and
   'I' (io, far) yields 'f'.  Return 0 if X is not an encoded symbol.  */
mep_section_tag (rtx x)
  const char *name;

  /* Strip wrappers until we reach the SYMBOL_REF itself.  */
  while (1)
    switch (GET_CODE (x))
      case MEM:
      case CONST:
	x = XEXP (x, 0);
	break;
      case UNSPEC:
	x = XVECEXP (x, 0, 0);
	break;
      case PLUS:
	/* Only a constant offset preserves the symbol's tag.  */
	if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	  return 0;
	x = XEXP (x, 0);
	break;
      default:
	goto done;
 done:
  if (GET_CODE (x) != SYMBOL_REF)
    return 0;
  name = XSTR (x, 0);
  /* Encoded names look like "@<tag>.<name>".  */
  if (name[0] == '@' && name[2] == '.')
      if (name[1] == 'i' || name[1] == 'I')
	  if (name[1] == 'I')
	    return 'f'; /* far */
	  return 'n'; /* near */
      return name[1];
  return 0;
/* Implement REGNO_REG_CLASS: return the smallest register class that
   contains hard register REGNO.  */
mep_regno_reg_class (int regno)
  /* Singleton classes for the special-purpose core registers.  */
  switch (regno)
      case SP_REGNO:		return SP_REGS;
      case TP_REGNO:		return TP_REGS;
      case GP_REGNO:		return GP_REGS;
      case 0:			return R0_REGS;
      case HI_REGNO:		return HI_REGS;
      case LO_REGNO:		return LO_REGS;
      case ARG_POINTER_REGNUM:	return GENERAL_REGS;

  /* The first eight general registers are addressable with the short
     $tp-relative forms.  */
  if (GR_REGNO_P (regno))
    return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
  if (CONTROL_REGNO_P (regno))
    return CONTROL_REGS;

  if (CR_REGNO_P (regno))
      int i, j;

      /* Search for the register amongst user-defined subclasses of
	 the coprocessor registers.  */
      for (i = USER0_REGS; i <= USER3_REGS; ++i)
	  if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
	    continue;
	  /* Accept USERn only if no smaller subclass of it also
	     contains REGNO; reg_class_subclasses is terminated by
	     LIM_REG_CLASSES.  */
	  for (j = 0; j < N_REG_CLASSES; ++j)
	      enum reg_class sub = reg_class_subclasses[i][j];

	      if (sub == LIM_REG_CLASSES)
		return i;
	      if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
		break;

      return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;

  if (CCR_REGNO_P (regno))
    return CCR_REGS;

  /* Only the shadow registers remain; they belong to no class.  */
  gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
  return NO_REGS;
623 #if 0
625 mep_reg_class_from_constraint (int c, const char *str)
627 switch (c)
629 case 'a':
630 return SP_REGS;
631 case 'b':
632 return TP_REGS;
633 case 'c':
634 return CONTROL_REGS;
635 case 'd':
636 return HILO_REGS;
637 case 'e':
639 switch (str[1])
641 case 'm':
642 return LOADABLE_CR_REGS;
643 case 'x':
644 return mep_have_copro_copro_moves_p ? CR_REGS : NO_REGS;
645 case 'r':
646 return mep_have_core_copro_moves_p ? CR_REGS : NO_REGS;
647 default:
648 return NO_REGS;
651 case 'h':
652 return HI_REGS;
653 case 'j':
654 return RPC_REGS;
655 case 'l':
656 return LO_REGS;
657 case 't':
658 return TPREL_REGS;
659 case 'v':
660 return GP_REGS;
661 case 'x':
662 return CR_REGS;
663 case 'y':
664 return CCR_REGS;
665 case 'z':
666 return R0_REGS;
668 case 'A':
669 case 'B':
670 case 'C':
671 case 'D':
673 enum reg_class which = c - 'A' + USER0_REGS;
674 return (reg_class_size[which] > 0 ? which : NO_REGS);
677 default:
678 return NO_REGS;
682 bool
683 mep_const_ok_for_letter_p (HOST_WIDE_INT value, int c)
685 switch (c)
687 case 'I': return value >= -32768 && value < 32768;
688 case 'J': return value >= 0 && value < 65536;
689 case 'K': return value >= 0 && value < 0x01000000;
690 case 'L': return value >= -32 && value < 32;
691 case 'M': return value >= 0 && value < 32;
692 case 'N': return value >= 0 && value < 16;
693 case 'O':
694 if (value & 0xffff)
695 return false;
696 return value >= -2147483647-1 && value <= 2147483647;
697 default:
698 gcc_unreachable ();
702 bool
703 mep_extra_constraint (rtx value, int c)
705 encode_pattern (value);
707 switch (c)
709 case 'R':
710 /* For near symbols, like what call uses. */
711 if (GET_CODE (value) == REG)
712 return 0;
713 return mep_call_address_operand (value, GET_MODE (value));
715 case 'S':
716 /* For signed 8-bit immediates. */
717 return (GET_CODE (value) == CONST_INT
718 && INTVAL (value) >= -128
719 && INTVAL (value) <= 127);
721 case 'T':
722 /* For tp/gp relative symbol values. */
723 return (RTX_IS ("u3s") || RTX_IS ("u2s")
724 || RTX_IS ("+u3si") || RTX_IS ("+u2si"));
726 case 'U':
727 /* Non-absolute memories. */
728 return GET_CODE (value) == MEM && ! CONSTANT_P (XEXP (value, 0));
730 case 'W':
731 /* %hi(sym) */
732 return RTX_IS ("Hs");
734 case 'Y':
735 /* Register indirect. */
736 return RTX_IS ("mr");
738 case 'Z':
739 return mep_section_tag (value) == 'c' && RTX_IS ("ms");
742 return false;
744 #endif
746 #undef PASS
747 #undef FAIL
749 static bool
750 const_in_range (rtx x, int minv, int maxv)
752 return (GET_CODE (x) == CONST_INT
753 && INTVAL (x) >= minv
754 && INTVAL (x) <= maxv);
757 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
758 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
759 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
760 at the end of the insn stream. */
763 mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
765 if (rtx_equal_p (dest, src1))
766 return src2;
767 else if (rtx_equal_p (dest, src2))
768 return src1;
769 else
771 if (insn == 0)
772 emit_insn (gen_movsi (copy_rtx (dest), src1));
773 else
774 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
775 return src2;
779 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
780 Change the last element of PATTERN from (clobber (scratch:SI))
781 to (clobber (reg:SI HI_REGNO)). */
783 static void
784 mep_rewrite_mult (rtx insn, rtx pattern)
786 rtx hi_clobber;
788 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
789 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
790 PATTERN (insn) = pattern;
791 INSN_CODE (insn) = -1;
794 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
795 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
796 store the result in DEST if nonnull. */
798 static void
799 mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
801 rtx lo, pattern;
803 lo = gen_rtx_REG (SImode, LO_REGNO);
804 if (dest)
805 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
806 mep_mulr_source (insn, dest, src1, src2));
807 else
808 pattern = gen_mulsi3_lo (lo, src1, src2);
809 mep_rewrite_mult (insn, pattern);
812 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
813 SRC3 into $lo, then use either madd or maddr. The move into $lo will
814 be deleted by a peephole2 if SRC3 is already in $lo. */
816 static void
817 mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
819 rtx lo, pattern;
821 lo = gen_rtx_REG (SImode, LO_REGNO);
822 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
823 if (dest)
824 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
825 mep_mulr_source (insn, dest, src1, src2),
826 copy_rtx (lo));
827 else
828 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
829 mep_rewrite_mult (insn, pattern);
832 /* Return true if $lo has the same value as integer register GPR when
833 instruction INSN is reached. If necessary, rewrite the instruction
834 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
835 rtx for (reg:SI LO_REGNO).
837 This function is intended to be used by the peephole2 pass. Since
838 that pass goes from the end of a basic block to the beginning, and
839 propagates liveness information on the way, there is no need to
840 update register notes here.
842 If GPR_DEAD_P is true on entry, and this function returns true,
843 then the caller will replace _every_ use of GPR in and after INSN
844 with LO. This means that if the instruction that sets $lo is a
845 mulr- or maddr-type instruction, we can rewrite it to use mul or
846 madd instead. In combination with the copy progagation pass,
847 this allows us to replace sequences like:
849 mov GPR,R1
850 mulr GPR,R2
852 with:
854 mul R1,R2
856 if GPR is no longer used. */
static bool
mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
      /* Walk backwards from INSN looking for the instruction that
	 set GPR, stopping at the basic-block boundary.  */
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	switch (recog_memoized (insn))
	  case CODE_FOR_mulsi3_1:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
		/* GPR was set by a plain multiply; rewrite it to also
		   (or, if GPR is dead, only) set $lo.  */
		mep_rewrite_mulsi3 (insn,
				    gpr_dead_p ? NULL : recog_data.operand[0],
				    recog_data.operand[1],
				    recog_data.operand[2]);
		return true;
	    return false;

	  case CODE_FOR_maddsi3:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
		/* Same, for a multiply-accumulate.  */
		mep_rewrite_maddsi3 (insn,
				     gpr_dead_p ? NULL : recog_data.operand[0],
				     recog_data.operand[1],
				     recog_data.operand[2],
				     recog_data.operand[3]);
		return true;
	    return false;

	  case CODE_FOR_mulsi3r:
	  case CODE_FOR_maddsi3r:
	    /* These already store into $lo; operand 1 is the GPR copy
	       of the result.  */
	    extract_insn (insn);
	    return rtx_equal_p (recog_data.operand[1], gpr);

	  default:
	    /* Any other write to $lo or GPR, or an insn with unknown
	       side effects, invalidates the reuse.  */
	    if (reg_set_p (lo, insn)
		|| reg_set_p (gpr, insn)
		|| volatile_insn_p (PATTERN (insn)))
	      return false;

	    /* A mere use of GPR means it is not dead after all.  */
	    if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
	      gpr_dead_p = false;
	    break;
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
912 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
914 bool
915 mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
917 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
918 extract_insn (insn);
919 return result;
922 /* Return true if SET can be turned into a post-modify load or store
923 that adds OFFSET to GPR. In other words, return true if SET can be
924 changed into:
926 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
928 It's OK to change SET to an equivalent operation in order to
929 make it match. */
static bool
mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
  rtx *reg, *mem;
  unsigned int reg_bytes, mem_bytes;
  enum machine_mode reg_mode, mem_mode;

  /* Only simple SETs can be converted. */
  if (GET_CODE (set) != SET)
    return false;

  /* Point REG to what we hope will be the register side of the set and
     MEM to what we hope will be the memory side. */
  if (GET_CODE (SET_DEST (set)) == MEM)
      mem = &SET_DEST (set);
      reg = &SET_SRC (set);
  else
      reg = &SET_DEST (set);
      mem = &SET_SRC (set);
      /* Look through the extension of a subword load.  */
      if (GET_CODE (*mem) == SIGN_EXTEND)
	mem = &XEXP (*mem, 0);

  /* Check that *REG is a suitable coprocessor register. */
  if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
    return false;

  /* Check that *MEM is a suitable memory reference: a plain
     register-indirect access through GPR.  */
  if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
    return false;

  /* Get the number of bytes in each operand. */
  mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
  reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));

  /* Check that OFFSET is suitably aligned for the access width. */
  if (INTVAL (offset) & (mem_bytes - 1))
    return false;

  /* All checks passed; from here on SET is modified in place.
     Convert *MEM to a normal integer mode. */
  mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
  *mem = change_address (*mem, mem_mode, NULL);

  /* Adjust *REG as well. */
  *reg = shallow_copy_rtx (*reg);
  if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
      /* SET is a subword load.  Convert it to an explicit extension. */
      PUT_MODE (*reg, SImode);
      *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
  else
      reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
      PUT_MODE (*reg, reg_mode);

  return true;
993 /* Return the effect of frame-related instruction INSN. */
995 static rtx
996 mep_frame_expr (rtx insn)
998 rtx note, expr;
1000 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
1001 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
1002 RTX_FRAME_RELATED_P (expr) = 1;
1003 return expr;
/* Merge instructions INSN1 and INSN2 using a PARALLEL.  Store the
   new pattern in INSN1; INSN2 will be deleted by the caller.  */
static void
mep_make_parallel (rtx insn1, rtx insn2)
  rtx expr;

  /* If INSN2 carries frame information, move it onto INSN1, chaining
     it after INSN1's own frame expression when both have one.  */
  if (RTX_FRAME_RELATED_P (insn2))
      expr = mep_frame_expr (insn2);
      if (RTX_FRAME_RELATED_P (insn1))
	expr = gen_rtx_SEQUENCE (VOIDmode,
				 gen_rtvec (2, mep_frame_expr (insn1), expr));
      set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
      RTX_FRAME_RELATED_P (insn1) = 1;

  /* Combine the two patterns and force re-recognition.  */
  PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
				      gen_rtvec (2, PATTERN (insn1),
						 PATTERN (insn2)));
  INSN_CODE (insn1) = -1;
/* SET_INSN is an instruction that adds OFFSET to REG.  Go back through
   the basic block to see if any previous load or store instruction can
   be persuaded to do SET_INSN as a side-effect.  Return true if so.  */
static bool
mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
  rtx insn;

  insn = set_insn;
      /* Scan backwards, stopping at the basic-block boundary.  */
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	  if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
	      /* Found one; fold the addition into it as a PARALLEL.  */
	      mep_make_parallel (insn, set_insn);
	      return true;

	  /* Any intervening touch of REG, or an insn with unknown
	     side effects, makes the transformation unsafe.  */
	  if (reg_set_p (reg, insn)
	      || reg_referenced_p (reg, PATTERN (insn))
	      || volatile_insn_p (PATTERN (insn)))
	    return false;
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
1061 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
1063 bool
1064 mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
1066 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
1067 extract_insn (insn);
1068 return result;
1071 bool
1072 mep_allow_clip (rtx ux, rtx lx, int s)
1074 HOST_WIDE_INT u = INTVAL (ux);
1075 HOST_WIDE_INT l = INTVAL (lx);
1076 int i;
1078 if (!TARGET_OPT_CLIP)
1079 return false;
1081 if (s)
1083 for (i = 0; i < 30; i ++)
1084 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
1085 && (l == - ((HOST_WIDE_INT) 1 << i)))
1086 return true;
1088 else
1090 if (l != 0)
1091 return false;
1093 for (i = 0; i < 30; i ++)
1094 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
1095 return true;
1097 return false;
1100 bool
1101 mep_bit_position_p (rtx x, bool looking_for)
1103 if (GET_CODE (x) != CONST_INT)
1104 return false;
1105 switch ((int) INTVAL(x) & 0xff)
1107 case 0x01: case 0x02: case 0x04: case 0x08:
1108 case 0x10: case 0x20: case 0x40: case 0x80:
1109 return looking_for;
1110 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1111 case 0xef: case 0xdf: case 0xbf: case 0x7f:
1112 return !looking_for;
1114 return false;
/* Return true if moving SRC into DEST cannot be done with one
   instruction and must be split into a two-instruction sequence.  */
static bool
move_needs_splitting (rtx dest, rtx src,
		      enum machine_mode mode ATTRIBUTE_UNUSED)
  int s = mep_section_tag (src);

  /* Peel MEM/CONST wrappers off SRC until we reach something
     symbolic; anything else never needs splitting.  */
  while (1)
      if (GET_CODE (src) == CONST
	  || GET_CODE (src) == MEM)
	src = XEXP (src, 0);
      else if (GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == PLUS)
	break;
      else
	return false;

  /* A split is needed for: far symbols; symbol-plus-offset where the
     offset is outside [-65536, 0xffffff]; or a hard destination
     register above $7.  */
  if (s == 'f'
      || (GET_CODE (src) == PLUS
	  && GET_CODE (XEXP (src, 1)) == CONST_INT
	  && (INTVAL (XEXP (src, 1)) < -65536
	      || INTVAL (XEXP (src, 1)) > 0xffffff))
      || (GET_CODE (dest) == REG
	  && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
    return true;
  return false;
1146 bool
1147 mep_split_mov (rtx *operands, int symbolic)
1149 if (symbolic)
1151 if (move_needs_splitting (operands[0], operands[1], SImode))
1152 return true;
1153 return false;
1156 if (GET_CODE (operands[1]) != CONST_INT)
1157 return false;
1159 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1160 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1161 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1162 return false;
1164 if (((!reload_completed && !reload_in_progress)
1165 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1166 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1167 return false;
1169 return true;
1172 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1173 it to one specific value. So the insn chosen depends on whether
1174 the source and destination modes match. */
1176 bool
1177 mep_vliw_mode_match (rtx tgt)
1179 bool src_vliw = mep_vliw_function_p (cfun->decl);
1180 bool tgt_vliw = INTVAL (tgt);
1182 return src_vliw == tgt_vliw;
1185 /* Like the above, but also test for near/far mismatches. */
1187 bool
1188 mep_vliw_jmp_match (rtx tgt)
1190 bool src_vliw = mep_vliw_function_p (cfun->decl);
1191 bool tgt_vliw = INTVAL (tgt);
1193 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1194 return false;
1196 return src_vliw == tgt_vliw;
1199 bool
1200 mep_multi_slot (rtx x)
1202 return get_attr_slot (x) == SLOT_MULTI;
1206 /* Be careful not to use macros that need to be compiled one way for
1207 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
/* Target legitimate-address predicate: true when X is a valid address
   for MODE.  STRICT selects strict register checking (via GEN_REG).  */
1209 bool
1210 mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
1212 int the_tag;
1214 #define DEBUG_LEGIT 0
1215 #if DEBUG_LEGIT
1216 fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
1217 debug_rtx (x);
1218 #endif
/* %lo(sym)[reg]: a LO_SUM of a base register and a constant.  */
1220 if (GET_CODE (x) == LO_SUM
1221 && GET_CODE (XEXP (x, 0)) == REG
1222 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1223 && CONSTANT_P (XEXP (x, 1)))
1225 if (GET_MODE_SIZE (mode) > 4)
1227 /* We will end up splitting this, and lo_sums are not
1228 offsettable for us. */
1229 #if DEBUG_LEGIT
1230 fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
1231 #endif
1232 return false;
1234 #if DEBUG_LEGIT
1235 fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
1236 #endif
1237 return true;
/* Plain register indirect.  */
1240 if (GET_CODE (x) == REG
1241 && GEN_REG (REGNO (x), strict))
1243 #if DEBUG_LEGIT
1244 fprintf (stderr, " - yup, [reg]\n");
1245 #endif
1246 return true;
/* Base register plus a 16-bit signed displacement.  */
1249 if (GET_CODE (x) == PLUS
1250 && GET_CODE (XEXP (x, 0)) == REG
1251 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1252 && const_in_range (XEXP (x, 1), -32768, 32767)
1254 #if DEBUG_LEGIT
1255 fprintf (stderr, " - yup, [reg+const]\n")
1256 #endif
1257 return true;
/* Base register plus an UNSPEC (optionally UNSPEC + const offset);
   this is the form used for TP-/GP-relative references.  */
1260 if (GET_CODE (x) == PLUS
1261 && GET_CODE (XEXP (x, 0)) == REG
1262 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1263 && GET_CODE (XEXP (x, 1)) == CONST
1264 && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
1265 || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
1266 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
1267 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
1269 #if DEBUG_LEGIT
1270 fprintf (stderr, " - yup, [reg+unspec]\n");
1271 #endif
1272 return true;
/* Far-section ('f') symbols are never directly addressable.  */
1275 the_tag = mep_section_tag (x);
1277 if (the_tag == 'f')
1279 #if DEBUG_LEGIT
1280 fprintf (stderr, " - nope, [far]\n");
1281 #endif
1282 return false;
/* VOIDmode with a SYMBOL_REF: a call target.  */
1285 if (mode == VOIDmode
1286 && GET_CODE (x) == SYMBOL_REF)
1288 #if DEBUG_LEGIT
1289 fprintf (stderr, " - yup, call [symbol]\n");
1290 #endif
1291 return true;
/* Constant addresses for word accesses, excluding tiny/based sections;
   integer constants must be word-aligned and within [0, 0xfffff].  */
1294 if ((mode == SImode || mode == SFmode)
1295 && CONSTANT_P (x)
1296 && LEGITIMATE_CONSTANT_P (x)
1297 && the_tag != 't' && the_tag != 'b')
1299 if (GET_CODE (x) != CONST_INT
1300 || (INTVAL (x) <= 0xfffff
1301 && INTVAL (x) >= 0
1302 && (INTVAL (x) % 4) == 0))
1304 #if DEBUG_LEGIT
1305 fprintf (stderr, " - yup, [const]\n");
1306 #endif
1307 return true;
1311 #if DEBUG_LEGIT
1312 fprintf (stderr, " - nope.\n");
1313 #endif
1314 return false;
1318 mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
1319 enum reload_type type,
1320 int ind_levels ATTRIBUTE_UNUSED)
1322 if (GET_CODE (*x) == PLUS
1323 && GET_CODE (XEXP (*x, 0)) == MEM
1324 && GET_CODE (XEXP (*x, 1)) == REG)
1326 /* GCC will by default copy the MEM into a REG, which results in
1327 an invalid address. For us, the best thing to do is move the
1328 whole expression to a REG. */
1329 push_reload (*x, NULL_RTX, x, NULL,
1330 GENERAL_REGS, mode, VOIDmode,
1331 0, 0, opnum, type);
1332 return 1;
1335 if (GET_CODE (*x) == PLUS
1336 && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
1337 && GET_CODE (XEXP (*x, 1)) == CONST_INT)
1339 char e = mep_section_tag (XEXP (*x, 0));
1341 if (e != 't' && e != 'b')
1343 /* GCC thinks that (sym+const) is a valid address. Well,
1344 sometimes it is, this time it isn't. The best thing to
1345 do is reload the symbol to a register, since reg+int
1346 tends to work, and we can't just add the symbol and
1347 constant anyway. */
1348 push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
1349 GENERAL_REGS, mode, VOIDmode,
1350 0, 0, opnum, type);
1351 return 1;
1354 return 0;
1358 mep_core_address_length (rtx insn, int opn)
1360 rtx set = single_set (insn);
1361 rtx mem = XEXP (set, opn);
1362 rtx other = XEXP (set, 1-opn);
1363 rtx addr = XEXP (mem, 0);
1365 if (register_operand (addr, Pmode))
1366 return 2;
1367 if (GET_CODE (addr) == PLUS)
1369 rtx addend = XEXP (addr, 1);
1371 gcc_assert (REG_P (XEXP (addr, 0)));
1373 switch (REGNO (XEXP (addr, 0)))
1375 case STACK_POINTER_REGNUM:
1376 if (GET_MODE_SIZE (GET_MODE (mem)) == 4
1377 && mep_imm7a4_operand (addend, VOIDmode))
1378 return 2;
1379 break;
1381 case 13: /* TP */
1382 gcc_assert (REG_P (other));
1384 if (REGNO (other) >= 8)
1385 break;
1387 if (GET_CODE (addend) == CONST
1388 && GET_CODE (XEXP (addend, 0)) == UNSPEC
1389 && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
1390 return 2;
1392 if (GET_CODE (addend) == CONST_INT
1393 && INTVAL (addend) >= 0
1394 && INTVAL (addend) <= 127
1395 && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
1396 return 2;
1397 break;
1401 return 4;
1405 mep_cop_address_length (rtx insn, int opn)
1407 rtx set = single_set (insn);
1408 rtx mem = XEXP (set, opn);
1409 rtx addr = XEXP (mem, 0);
1411 if (GET_CODE (mem) != MEM)
1412 return 2;
1413 if (register_operand (addr, Pmode))
1414 return 2;
1415 if (GET_CODE (addr) == POST_INC)
1416 return 2;
1418 return 4;
1421 #define DEBUG_EXPAND_MOV 0
/* Expand a move of operands[1] into operands[0] for MODE.  Returns
   true if the move was fully emitted here, false to let the caller's
   pattern handle it.  */
1422 bool
1423 mep_expand_mov (rtx *operands, enum machine_mode mode)
1425 int i, t;
1426 int tag[2];
1427 rtx tpsym, tpoffs;
1428 int post_reload = 0;
1430 tag[0] = mep_section_tag (operands[0]);
1431 tag[1] = mep_section_tag (operands[1]);
/* Before reload, if neither operand is (a subreg of) a register,
   force the source into a register: no mem-to-mem moves.  */
1433 if (!reload_in_progress
1434 && !reload_completed
1435 && GET_CODE (operands[0]) != REG
1436 && GET_CODE (operands[0]) != SUBREG
1437 && GET_CODE (operands[1]) != REG
1438 && GET_CODE (operands[1]) != SUBREG)
1439 operands[1] = copy_to_mode_reg (mode, operands[1]);
1441 #if DEBUG_EXPAND_MOV
1442 fprintf(stderr, "expand move %s %d\n", mode_name[mode],
1443 reload_in_progress || reload_completed);
1444 debug_rtx (operands[0]);
1445 debug_rtx (operands[1]);
1446 #endif
/* 64-bit moves are never expanded here.  */
1448 if (mode == DImode || mode == DFmode)
1449 return false;
/* During/after reload, detect when the cached $gp/$tp initial values
   are no longer live in the expected hard registers; if so, tiny-
   and based-section symbols must be treated as plain symbols.  */
1451 if (reload_in_progress || reload_completed)
1453 rtx r;
1455 if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
1456 cfun->machine->reload_changes_tp = true;
1458 if (tag[0] == 't' || tag[1] == 't')
1460 r = has_hard_reg_initial_val (Pmode, GP_REGNO);
1461 if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
1462 post_reload = 1;
1464 if (tag[0] == 'b' || tag[1] == 'b')
1466 r = has_hard_reg_initial_val (Pmode, TP_REGNO);
1467 if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
1468 post_reload = 1;
1470 if (cfun->machine->reload_changes_tp == true)
1471 post_reload = 1;
1474 if (!post_reload)
1476 rtx n;
/* Rewrite a based ('b') or tiny ('t') symbol source as
   $tp/$gp plus a UNS_TPREL/UNS_GPREL unspec.  */
1477 if (symbol_p (operands[1]))
1479 t = mep_section_tag (operands[1]);
1480 if (t == 'b' || t == 't')
1483 if (GET_CODE (operands[1]) == SYMBOL_REF)
1485 tpsym = operands[1];
1486 n = gen_rtx_UNSPEC (mode,
1487 gen_rtvec (1, operands[1]),
1488 t == 'b' ? UNS_TPREL : UNS_GPREL);
1489 n = gen_rtx_CONST (mode, n);
/* (const (plus sym const_int)): keep the offset outside the unspec.  */
1491 else if (GET_CODE (operands[1]) == CONST
1492 && GET_CODE (XEXP (operands[1], 0)) == PLUS
1493 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
1494 && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
1496 tpsym = XEXP (XEXP (operands[1], 0), 0);
1497 tpoffs = XEXP (XEXP (operands[1], 0), 1);
1498 n = gen_rtx_UNSPEC (mode,
1499 gen_rtvec (1, tpsym),
1500 t == 'b' ? UNS_TPREL : UNS_GPREL);
1501 n = gen_rtx_PLUS (mode, n, tpoffs);
1502 n = gen_rtx_CONST (mode, n);
/* Already rewritten to an unspec: nothing more to do here.  */
1504 else if (GET_CODE (operands[1]) == CONST
1505 && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
1506 return false;
1507 else
1509 error ("unusual TP-relative address");
1510 return false;
1513 n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
1514 : mep_gp_rtx ()), n);
1515 n = emit_insn (gen_rtx_SET (mode, operands[0], n));
1516 #if DEBUG_EXPAND_MOV
1517 fprintf(stderr, "mep_expand_mov emitting ");
1518 debug_rtx(n);
1519 #endif
1520 return true;
/* Likewise rewrite MEM operands whose address is in a based/tiny
   section into $tp/$gp + unspec form.  */
1524 for (i=0; i < 2; i++)
1526 t = mep_section_tag (operands[i]);
1527 if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
1529 rtx sym, n, r;
1530 int u;
1532 sym = XEXP (operands[i], 0);
1533 if (GET_CODE (sym) == CONST
1534 && GET_CODE (XEXP (sym, 0)) == UNSPEC)
1535 sym = XVECEXP (XEXP (sym, 0), 0, 0);
1537 if (t == 'b')
1539 r = mep_tp_rtx ();
1540 u = UNS_TPREL;
1542 else
1544 r = mep_gp_rtx ();
1545 u = UNS_GPREL;
1548 n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
1549 n = gen_rtx_CONST (Pmode, n);
1550 n = gen_rtx_PLUS (Pmode, r, n);
1551 operands[i] = replace_equiv_address (operands[i], n);
/* Control-register moves need the non-CR side in a register.  */
1556 if ((GET_CODE (operands[1]) != REG
1557 && MEP_CONTROL_REG (operands[0]))
1558 || (GET_CODE (operands[0]) != REG
1559 && MEP_CONTROL_REG (operands[1])))
1561 rtx temp;
1562 #if DEBUG_EXPAND_MOV
1563 fprintf (stderr, "cr-mem, forcing op1 to reg\n");
1564 #endif
1565 temp = gen_reg_rtx (mode);
1566 emit_move_insn (temp, operands[1]);
1567 operands[1] = temp;
/* Stores to a far-section or non-word symbolic destination: compute
   the address into a register first, then store through it.  */
1570 if (symbolref_p (operands[0])
1571 && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
1572 || (GET_MODE_SIZE (mode) != 4)))
1574 rtx temp;
1576 gcc_assert (!reload_in_progress && !reload_completed);
1578 temp = force_reg (Pmode, XEXP (operands[0], 0));
1579 operands[0] = replace_equiv_address (operands[0], temp);
1580 emit_move_insn (operands[0], operands[1]);
1581 return true;
/* Before post_reload, tiny/based sources were already handled above.  */
1584 if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
1585 tag[1] = 0;
/* Symbolic source: build the address with a topsym/botsym pair.  */
1587 if (symbol_p (operands[1])
1588 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1590 emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
1591 emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
1592 return true;
/* Load through a symbolic address: form the address in a temporary
   (the destination itself during/after reload), then load.  */
1595 if (symbolref_p (operands[1])
1596 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1598 rtx temp;
1600 if (reload_in_progress || reload_completed)
1601 temp = operands[0];
1602 else
1603 temp = gen_reg_rtx (Pmode);
1605 emit_insn (gen_movsi_topsym_s (temp, operands[1]));
1606 emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
1607 emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
1608 return true;
1611 return false;
1614 /* Cases where the pattern can't be made to use at all. */
/* Predicate for the mov patterns: returns true if this operand pair
   is acceptable to the move insn as-is.  */
1616 bool
1617 mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1619 int i;
1621 #define DEBUG_MOV_OK 0
1622 #if DEBUG_MOV_OK
1623 fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
1624 mep_section_tag (operands[1]));
1625 debug_rtx (operands[0]);
1626 debug_rtx (operands[1]);
1627 #endif
1629 /* We want the movh patterns to get these. */
1630 if (GET_CODE (operands[1]) == HIGH)
1631 return false;
1633 /* We can't store a register to a far variable without using a
1634 scratch register to hold the address. Using far variables should
1635 be split by mep_emit_mov anyway. */
1636 if (mep_section_tag (operands[0]) == 'f'
1637 || mep_section_tag (operands[1]) == 'f')
1639 #if DEBUG_MOV_OK
1640 fprintf (stderr, " - no, f\n");
1641 #endif
1642 return false;
/* Reject based/tiny section symbols before reload.  */
1644 i = mep_section_tag (operands[1]);
1645 if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
1646 /* These are supposed to be generated with adds of the appropriate
1647 register. During and after reload, however, we allow them to
1648 be accessed as normal symbols because adding a dependency on
1649 the base register now might cause problems. */
1651 #if DEBUG_MOV_OK
1652 fprintf (stderr, " - no, bt\n");
1653 #endif
1654 return false;
1657 /* The only moves we can allow involve at least one general
1658 register, so require it. */
1659 for (i = 0; i < 2; i ++)
1661 /* Allow subregs too, before reload. */
1662 rtx x = operands[i];
1664 if (GET_CODE (x) == SUBREG)
1665 x = XEXP (x, 0);
1666 if (GET_CODE (x) == REG
1667 && ! MEP_CONTROL_REG (x))
1669 #if DEBUG_MOV_OK
1670 fprintf (stderr, " - ok\n");
1671 #endif
1672 return true;
1675 #if DEBUG_MOV_OK
1676 fprintf (stderr, " - no, no gen reg\n");
1677 #endif
1678 return false;
1681 #define DEBUG_SPLIT_WIDE_MOVE 0
/* Split the two-word move operands[0] <- operands[1] into word-sized
   halves: the high parts are stored in operands[2]/[3] and the low
   parts in operands[4]/[5] for the caller's split pattern to emit.  */
1682 void
1683 mep_split_wide_move (rtx *operands, enum machine_mode mode)
1685 int i;
1687 #if DEBUG_SPLIT_WIDE_MOVE
1688 fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
1689 debug_rtx (operands[0]);
1690 debug_rtx (operands[1]);
1691 #endif
1693 for (i = 0; i <= 1; i++)
1695 rtx op = operands[i], hi, lo;
1697 switch (GET_CODE (op))
1699 case REG:
1701 unsigned int regno = REGNO (op);
/* A 64-bit coprocessor register: the low word is the SImode view,
   the high word is a zero_extract of bits 32..63.  */
1703 if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
1705 rtx i32;
1707 lo = gen_rtx_REG (SImode, regno);
1708 i32 = GEN_INT (32);
1709 hi = gen_rtx_ZERO_EXTRACT (SImode,
1710 gen_rtx_REG (DImode, regno),
1711 i32, i32);
/* Otherwise a register pair; endianness picks which regno is high.  */
1713 else
1715 hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
1716 lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
1719 break;
1721 case CONST_INT:
1722 case CONST_DOUBLE:
1723 case MEM:
1724 hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
1725 lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
1726 break;
1728 default:
1729 gcc_unreachable ();
1732 /* The high part of CR <- GPR moves must be done after the low part. */
1733 operands [i + 4] = lo;
1734 operands [i + 2] = hi;
/* If the first (high) move would clobber a register the second one
   still reads, or a zero_extract is involved, emit low half first.  */
1737 if (reg_mentioned_p (operands[2], operands[5])
1738 || GET_CODE (operands[2]) == ZERO_EXTRACT
1739 || GET_CODE (operands[4]) == ZERO_EXTRACT)
1741 rtx tmp;
1743 /* Overlapping register pairs -- make sure we don't
1744 early-clobber ourselves. */
1745 tmp = operands[2];
1746 operands[2] = operands[4];
1747 operands[4] = tmp;
1748 tmp = operands[3];
1749 operands[3] = operands[5];
1750 operands[5] = tmp;
1753 #if DEBUG_SPLIT_WIDE_MOVE
1754 fprintf(stderr, "\033[34m");
1755 debug_rtx (operands[2]);
1756 debug_rtx (operands[3]);
1757 debug_rtx (operands[4]);
1758 debug_rtx (operands[5]);
1759 fprintf(stderr, "\033[0m");
1760 #endif
1763 /* Emit a setcc instruction in its entirity. */
1765 static bool
1766 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1768 rtx tmp;
1770 switch (code)
1772 case GT:
1773 case GTU:
1774 tmp = op1, op1 = op2, op2 = tmp;
1775 code = swap_condition (code);
1776 /* FALLTHRU */
1778 case LT:
1779 case LTU:
1780 op1 = force_reg (SImode, op1);
1781 emit_insn (gen_rtx_SET (VOIDmode, dest,
1782 gen_rtx_fmt_ee (code, SImode, op1, op2)));
1783 return true;
1785 case EQ:
1786 if (op2 != const0_rtx)
1787 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1788 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1789 return true;
1791 case NE:
1792 /* Branchful sequence:
1793 mov dest, 0 16-bit
1794 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1795 mov dest, 1 16-bit
1797 Branchless sequence:
1798 add3 tmp, op1, -op2 32-bit (or mov + sub)
1799 sltu3 tmp, tmp, 1 16-bit
1800 xor3 dest, tmp, 1 32-bit
1802 if (optimize_size && op2 != const0_rtx)
1803 return false;
1805 if (op2 != const0_rtx)
1806 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1808 op2 = gen_reg_rtx (SImode);
1809 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1811 emit_insn (gen_rtx_SET (VOIDmode, dest,
1812 gen_rtx_XOR (SImode, op2, const1_rtx)));
1813 return true;
1815 case LE:
1816 if (GET_CODE (op2) != CONST_INT
1817 || INTVAL (op2) == 0x7ffffff)
1818 return false;
1819 op2 = GEN_INT (INTVAL (op2) + 1);
1820 return mep_expand_setcc_1 (LT, dest, op1, op2);
1822 case LEU:
1823 if (GET_CODE (op2) != CONST_INT
1824 || INTVAL (op2) == -1)
1825 return false;
1826 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1827 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1829 case GE:
1830 if (GET_CODE (op2) != CONST_INT
1831 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1832 return false;
1833 op2 = GEN_INT (INTVAL (op2) - 1);
1834 return mep_expand_setcc_1 (GT, dest, op1, op2);
1836 case GEU:
1837 if (GET_CODE (op2) != CONST_INT
1838 || op2 == const0_rtx)
1839 return false;
1840 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1841 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1843 default:
1844 gcc_unreachable ();
1848 bool
1849 mep_expand_setcc (rtx *operands)
1851 rtx dest = operands[0];
1852 enum rtx_code code = GET_CODE (operands[1]);
1853 rtx op0 = operands[2];
1854 rtx op1 = operands[3];
1856 return mep_expand_setcc_1 (code, dest, op0, op1);
/* Reduce the conditional branch comparison in operands[0..2] to a form
   the hardware supports (EQ/NE/LT/GE against a register or small
   immediate), emitting setcc insns as needed, and return the rewritten
   comparison rtx.  NOTE(review): the gcc_assert calls below have side
   effects (they emit insns); this relies on gcc_assert always being
   evaluated, unlike the standard assert macro.  */
1860 mep_expand_cbranch (rtx *operands)
1862 enum rtx_code code = GET_CODE (operands[0]);
1863 rtx op0 = operands[1];
1864 rtx op1 = operands[2];
1865 rtx tmp;
1867 restart:
1868 switch (code)
1870 case LT:
/* LT with a 4-bit immediate is branchable directly; otherwise
   materialize the comparison and branch on the result.  */
1871 if (mep_imm4_operand (op1, SImode))
1872 break;
1874 tmp = gen_reg_rtx (SImode);
1875 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1876 code = NE;
1877 op0 = tmp;
1878 op1 = const0_rtx;
1879 break;
1881 case GE:
/* GE is the inverse of LT: compute LT, branch on EQ with zero.  */
1882 if (mep_imm4_operand (op1, SImode))
1883 break;
1885 tmp = gen_reg_rtx (SImode);
1886 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1888 code = EQ;
1889 op0 = tmp;
1890 op1 = const0_rtx;
1891 break;
1893 case EQ:
1894 case NE:
1895 if (! mep_reg_or_imm4_operand (op1, SImode))
1896 op1 = force_reg (SImode, op1);
1897 break;
1899 case LE:
1900 case GT:
/* LE/GT against a constant below INT_MAX: adjust the bound by one
   and retry as LT/GE.  */
1901 if (GET_CODE (op1) == CONST_INT
1902 && INTVAL (op1) != 0x7fffffff)
1904 op1 = GEN_INT (INTVAL (op1) + 1);
1905 code = (code == LE ? LT : GE);
1906 goto restart;
/* Otherwise compute (op1 < op0) and branch on its result.  */
1909 tmp = gen_reg_rtx (SImode);
1910 gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));
1912 code = (code == LE ? EQ : NE);
1913 op0 = tmp;
1914 op1 = const0_rtx;
1915 break;
1917 case LTU:
/* (x <u 1) is just (x == 0).  */
1918 if (op1 == const1_rtx)
1920 code = EQ;
1921 op1 = const0_rtx;
1922 break;
1925 tmp = gen_reg_rtx (SImode);
1926 gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
1927 code = NE;
1928 op0 = tmp;
1929 op1 = const0_rtx;
1930 break;
1932 case LEU:
/* Try LEU directly; failing that, use the swapped LTU form.  */
1933 tmp = gen_reg_rtx (SImode);
1934 if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
1935 code = NE;
1936 else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
1937 code = EQ;
1938 else
1939 gcc_unreachable ();
1940 op0 = tmp;
1941 op1 = const0_rtx;
1942 break;
1944 case GTU:
1945 tmp = gen_reg_rtx (SImode);
1946 gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
1947 || mep_expand_setcc_1 (LTU, tmp, op1, op0));
1948 code = NE;
1949 op0 = tmp;
1950 op1 = const0_rtx;
1951 break;
1953 case GEU:
1954 tmp = gen_reg_rtx (SImode);
1955 if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
1956 code = NE;
1957 else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
1958 code = EQ;
1959 else
1960 gcc_unreachable ();
1961 op0 = tmp;
1962 op1 = const0_rtx;
1963 break;
1965 default:
1966 gcc_unreachable ();
1969 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
1972 const char *
1973 mep_emit_cbranch (rtx *operands, int ne)
1975 if (GET_CODE (operands[1]) == REG)
1976 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1977 else if (INTVAL (operands[1]) == 0)
1978 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1979 else
1980 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
/* Expand a call (or call-with-value when RETURNS_VALUE is nonzero).
   The target address is operands[returns_value]; $tp and $gp are
   passed to the call patterns so their uses are visible to them.  */
1983 void
1984 mep_expand_call (rtx *operands, int returns_value)
1986 rtx addr = operands[returns_value];
1987 rtx tp = mep_tp_rtx ();
1988 rtx gp = mep_gp_rtx ();
1990 gcc_assert (GET_CODE (addr) == MEM);
1992 addr = XEXP (addr, 0);
/* Addresses the call patterns can't take directly go via a register.  */
1994 if (! mep_call_address_operand (addr, VOIDmode))
1995 addr = force_reg (SImode, addr);
/* Default the third pattern operand to zero when absent.  */
1997 if (! operands[returns_value+2])
1998 operands[returns_value+2] = const0_rtx;
2000 if (returns_value)
2001 emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
2002 operands[3], tp, gp));
2003 else
2004 emit_call_insn (gen_call_internal (addr, operands[1],
2005 operands[2], tp, gp));
2008 /* Aliasing Support. */
2010 /* If X is a machine specific address (i.e. a symbol or label being
2011 referenced as a displacement from the GOT implemented using an
2012 UNSPEC), then return the base term. Otherwise return X. */
2015 mep_find_base_term (rtx x)
2017 rtx base, term;
2018 int unspec;
2020 if (GET_CODE (x) != PLUS)
2021 return x;
2022 base = XEXP (x, 0);
2023 term = XEXP (x, 1);
2025 if (has_hard_reg_initial_val(Pmode, TP_REGNO)
2026 && base == mep_tp_rtx ())
2027 unspec = UNS_TPREL;
2028 else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
2029 && base == mep_gp_rtx ())
2030 unspec = UNS_GPREL;
2031 else
2032 return x;
2034 if (GET_CODE (term) != CONST)
2035 return x;
2036 term = XEXP (term, 0);
2038 if (GET_CODE (term) != UNSPEC
2039 || XINT (term, 1) != unspec)
2040 return x;
2042 return XVECEXP (term, 0, 0);
2045 /* Reload Support. */
2047 /* Return true if the registers in CLASS cannot represent the change from
2048 modes FROM to TO. */
2050 bool
2051 mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
2052 enum reg_class regclass)
2054 if (from == to)
2055 return false;
2057 /* 64-bit COP regs must remain 64-bit COP regs. */
2058 if (TARGET_64BIT_CR_REGS
2059 && (regclass == CR_REGS
2060 || regclass == LOADABLE_CR_REGS)
2061 && (GET_MODE_SIZE (to) < 8
2062 || GET_MODE_SIZE (from) < 8))
2063 return true;
2065 return false;
2068 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
2070 static bool
2071 mep_general_reg (rtx x)
2073 while (GET_CODE (x) == SUBREG)
2074 x = XEXP (x, 0);
2075 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
2078 static bool
2079 mep_nongeneral_reg (rtx x)
2081 while (GET_CODE (x) == SUBREG)
2082 x = XEXP (x, 0);
2083 return (GET_CODE (x) == REG
2084 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2087 static bool
2088 mep_general_copro_reg (rtx x)
2090 while (GET_CODE (x) == SUBREG)
2091 x = XEXP (x, 0);
2092 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
2095 static bool
2096 mep_nonregister (rtx x)
2098 while (GET_CODE (x) == SUBREG)
2099 x = XEXP (x, 0);
2100 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2103 #define DEBUG_RELOAD 0
2105 /* Return the secondary reload class needed for moving value X to or
2106 from a register in coprocessor register class CLASS. */
2108 static enum reg_class
2109 mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
2111 if (mep_general_reg (x))
2112 /* We can do the move directly if mep_have_core_copro_moves_p,
2113 otherwise we need to go through memory. Either way, no secondary
2114 register is needed. */
2115 return NO_REGS;
2117 if (mep_general_copro_reg (x))
2119 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2120 if (mep_have_copro_copro_moves_p)
2121 return NO_REGS;
2123 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2124 if (mep_have_core_copro_moves_p)
2125 return GENERAL_REGS;
2127 /* Otherwise we need to do it through memory. No secondary
2128 register is needed. */
2129 return NO_REGS;
2132 if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
2133 && constraint_satisfied_p (x, CONSTRAINT_U))
2134 /* X is a memory value that we can access directly. */
2135 return NO_REGS;
2137 /* We have to move X into a GPR first and then copy it to
2138 the coprocessor register. The move from the GPR to the
2139 coprocessor might be done directly or through memory,
2140 depending on mep_have_core_copro_moves_p. */
2141 return GENERAL_REGS;
2144 /* Copying X to register in RCLASS. */
2147 mep_secondary_input_reload_class (enum reg_class rclass,
2148 enum machine_mode mode ATTRIBUTE_UNUSED,
2149 rtx x)
2151 int rv = NO_REGS;
2153 #if DEBUG_RELOAD
2154 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2155 debug_rtx (x);
2156 #endif
2158 if (reg_class_subset_p (rclass, CR_REGS))
2159 rv = mep_secondary_copro_reload_class (rclass, x);
2160 else if (MEP_NONGENERAL_CLASS (rclass)
2161 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2162 rv = GENERAL_REGS;
2164 #if DEBUG_RELOAD
2165 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2166 #endif
2167 return rv;
2170 /* Copying register in RCLASS to X. */
2173 mep_secondary_output_reload_class (enum reg_class rclass,
2174 enum machine_mode mode ATTRIBUTE_UNUSED,
2175 rtx x)
2177 int rv = NO_REGS;
2179 #if DEBUG_RELOAD
2180 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2181 debug_rtx (x);
2182 #endif
2184 if (reg_class_subset_p (rclass, CR_REGS))
2185 rv = mep_secondary_copro_reload_class (rclass, x);
2186 else if (MEP_NONGENERAL_CLASS (rclass)
2187 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2188 rv = GENERAL_REGS;
2190 #if DEBUG_RELOAD
2191 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2192 #endif
2194 return rv;
2197 /* Implement SECONDARY_MEMORY_NEEDED. */
2199 bool
2200 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2201 enum machine_mode mode ATTRIBUTE_UNUSED)
2203 if (!mep_have_core_copro_moves_p)
2205 if (reg_classes_intersect_p (rclass1, CR_REGS)
2206 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2207 return true;
2208 if (reg_classes_intersect_p (rclass2, CR_REGS)
2209 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2210 return true;
2211 if (!mep_have_copro_copro_moves_p
2212 && reg_classes_intersect_p (rclass1, CR_REGS)
2213 && reg_classes_intersect_p (rclass2, CR_REGS))
2214 return true;
2216 return false;
/* Emit the insns for a secondary reload.  operands[0] is the
   destination, operands[1] the source, operands[2] the scratch
   register.  WHICH encodes the case as a two-"digit" code:
   tens digit = destination (0 general, 1 control reg, 2 far),
   ones digit = source (same encoding, with 2 = far symbol).
   NOTE(review): the leading-zero case labels (00, 01, 02) are octal
   constants; harmless here because every digit is below 8, so they
   equal their decimal readings.  */
2219 void
2220 mep_expand_reload (rtx *operands, enum machine_mode mode)
2222 /* There are three cases for each direction:
2223 register, farsym
2224 control, farsym
2225 control, nearsym */
2227 int s0 = mep_section_tag (operands[0]) == 'f';
2228 int s1 = mep_section_tag (operands[1]) == 'f';
2229 int c0 = mep_nongeneral_reg (operands[0]);
2230 int c1 = mep_nongeneral_reg (operands[1]);
2231 int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);
2233 #if DEBUG_RELOAD
2234 fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
2235 debug_rtx (operands[0]);
2236 debug_rtx (operands[1]);
2237 #endif
2239 switch (which)
2241 case 00: /* Don't know why this gets here. */
2242 case 02: /* general = far */
2243 emit_move_insn (operands[0], operands[1]);
2244 return;
/* Control-register moves bounce through the scratch register.  */
2246 case 10: /* cr = mem */
2247 case 11: /* cr = cr */
2248 case 01: /* mem = cr */
2249 case 12: /* cr = far */
2250 emit_move_insn (operands[2], operands[1]);
2251 emit_move_insn (operands[0], operands[2]);
2252 return;
/* Far stores: compute the address in the scratch, store through it.  */
2254 case 20: /* far = general */
2255 emit_move_insn (operands[2], XEXP (operands[1], 0));
2256 emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
2257 return;
2259 case 21: /* far = cr */
2260 case 22: /* far = far */
2261 default:
2262 fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
2263 which, mode_name[mode]);
2264 debug_rtx (operands[0]);
2265 debug_rtx (operands[1]);
2266 gcc_unreachable ();
2270 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2271 can be moved directly into registers 0 to 7, but not into the rest.
2272 If so, and if the required class includes registers 0 to 7, restrict
2273 it to those registers. */
2275 enum reg_class
2276 mep_preferred_reload_class (rtx x, enum reg_class rclass)
2278 switch (GET_CODE (x))
2280 case CONST_INT:
2281 if (INTVAL (x) >= 0x10000
2282 && INTVAL (x) < 0x01000000
2283 && (INTVAL (x) & 0xffff) != 0
2284 && reg_class_subset_p (TPREL_REGS, rclass))
2285 rclass = TPREL_REGS;
2286 break;
2288 case CONST:
2289 case SYMBOL_REF:
2290 case LABEL_REF:
2291 if (mep_section_tag (x) != 'f'
2292 && reg_class_subset_p (TPREL_REGS, rclass))
2293 rclass = TPREL_REGS;
2294 break;
2296 default:
2297 break;
2299 return rclass;
2302 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2303 moves, 4 for direct double-register moves, and 1000 for anything
2304 that requires a temporary register or temporary stack slot. */
2307 mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
2309 if (mep_have_copro_copro_moves_p
2310 && reg_class_subset_p (from, CR_REGS)
2311 && reg_class_subset_p (to, CR_REGS))
2313 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2314 return 4;
2315 return 2;
2317 if (reg_class_subset_p (from, CR_REGS)
2318 && reg_class_subset_p (to, CR_REGS))
2320 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2321 return 8;
2322 return 4;
2324 if (reg_class_subset_p (from, CR_REGS)
2325 || reg_class_subset_p (to, CR_REGS))
2327 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2328 return 4;
2329 return 2;
2331 if (mep_secondary_memory_needed (from, to, mode))
2332 return 1000;
2333 if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
2334 return 1000;
2336 if (GET_MODE_SIZE (mode) > 4)
2337 return 4;
2339 return 2;
2343 /* Functions to save and restore machine-specific function data. */
2345 static struct machine_function *
2346 mep_init_machine_status (void)
2348 struct machine_function *f;
2350 f = (struct machine_function *) ggc_alloc_cleared (sizeof (struct machine_function));
2352 return f;
2355 static rtx
2356 mep_allocate_initial_value (rtx reg)
2358 int rss;
2360 if (GET_CODE (reg) != REG)
2361 return NULL_RTX;
2363 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2364 return NULL_RTX;
2366 /* In interrupt functions, the "initial" values of $gp and $tp are
2367 provided by the prologue. They are not necessarily the same as
2368 the values that the caller was using. */
2369 if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
2370 if (mep_interrupt_p ())
2371 return NULL_RTX;
2373 if (! cfun->machine->reg_save_slot[REGNO(reg)])
2375 cfun->machine->reg_save_size += 4;
2376 cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
2379 rss = cfun->machine->reg_save_slot[REGNO(reg)];
2380 return gen_rtx_MEM (SImode, plus_constant (arg_pointer_rtx, -rss));
2384 mep_return_addr_rtx (int count)
2386 if (count != 0)
2387 return const0_rtx;
2389 return get_hard_reg_initial_val (Pmode, LP_REGNO);
/* Pseudo holding the function-entry value of $tp.  */
2392 static rtx
2393 mep_tp_rtx (void)
2395 return get_hard_reg_initial_val (Pmode, TP_REGNO);
/* Pseudo holding the function-entry value of $gp.  */
2398 static rtx
2399 mep_gp_rtx (void)
2401 return get_hard_reg_initial_val (Pmode, GP_REGNO);
2404 static bool
2405 mep_interrupt_p (void)
2407 if (cfun->machine->interrupt_handler == 0)
2409 int interrupt_handler
2410 = (lookup_attribute ("interrupt",
2411 DECL_ATTRIBUTES (current_function_decl))
2412 != NULL_TREE);
2413 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2415 return cfun->machine->interrupt_handler == 2;
2418 static bool
2419 mep_disinterrupt_p (void)
2421 if (cfun->machine->disable_interrupts == 0)
2423 int disable_interrupts
2424 = (lookup_attribute ("disinterrupt",
2425 DECL_ATTRIBUTES (current_function_decl))
2426 != NULL_TREE);
2427 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2429 return cfun->machine->disable_interrupts == 2;
2433 /* Frame/Epilog/Prolog Related. */
/* Return true if INSN (an insn or a pattern) sets REG.  */
2435 static bool
2436 mep_reg_set_p (rtx reg, rtx insn)
2438 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2439 if (INSN_P (insn))
2441 if (FIND_REG_INC_NOTE (insn, reg)
2442 return true;
2443 insn = PATTERN (insn);
/* A register copied onto itself does not count as a set.  */
2446 if (GET_CODE (insn) == SET
2447 && GET_CODE (XEXP (insn, 0)) == REG
2448 && GET_CODE (XEXP (insn, 1)) == REG
2449 && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
2450 return false;
2452 return set_of (reg, insn) != NULL_RTX;
/* Tri-state values stored into cfun->machine->reg_saved[] by
   mep_call_saves_register below.  */
2456 #define MEP_SAVES_UNKNOWN 0
2457 #define MEP_SAVES_YES 1
2458 #define MEP_SAVES_MAYBE 2
2459 #define MEP_SAVES_NO 3
/* Return true if register REGNO may be modified anywhere in the
   current function.  In interrupt handlers any live register counts;
   $lp is implicitly clobbered when profiling is on (cf. the comment
   in mep_epilogue_uses).  Otherwise scan every insn in the topmost
   sequence.  */
2461 static bool
2462 mep_reg_set_in_function (int regno)
2464 rtx reg, insn;
2466 if (mep_interrupt_p () && df_regs_ever_live_p(regno))
2467 return true;
2469 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2470 return true;
2472 push_topmost_sequence ();
2473 insn = get_insns ();
2474 pop_topmost_sequence ();
2476 if (!insn)
2477 return false;
2479 reg = gen_rtx_REG (SImode, regno);
2481 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2482 if (INSN_P (insn) && mep_reg_set_p (reg, insn))
2483 return true;
2484 return false;
/* Return true if the current function contains an asm statement with
   no operands (whose register usage GCC cannot see).  Cached in
   cfun->machine->asms_without_operands: 0 = unknown, 1 = no, 2 = yes.  */
2487 static bool
2488 mep_asm_without_operands_p (void)
2490 if (cfun->machine->asms_without_operands == 0)
2492 rtx insn;
2494 push_topmost_sequence ();
2495 insn = get_insns ();
2496 pop_topmost_sequence ();
2498 cfun->machine->asms_without_operands = 1;
2499 while (insn)
2501 if (INSN_P (insn)
2502 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2504 cfun->machine->asms_without_operands = 2;
2505 break;
2507 insn = NEXT_INSN (insn);
2511 return cfun->machine->asms_without_operands == 2;
2514 /* Interrupt functions save/restore every call-preserved register, and
2515 any call-used register it uses (or all if it calls any function,
2516 since they may get clobbered there too). Here we check to see
2517 which call-used registers need saving. */
/* True if R is one of the extra coprocessor control (CCR) registers
   that IVC2 interrupt handlers must additionally save/restore.  */
2519 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2520 && (r == FIRST_CCR_REGNO + 1 \
2521 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2522 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
/* Return true if the current interrupt handler must save register R.
   Covers the control-save temporaries, $lp in non-leaf functions (and
   when profiling via mep_reg_set_in_function), the repeat-block
   registers $rpb/$rpe/$rpc, call-used registers that called functions
   could clobber, and the extra IVC2 CCR registers.  Always false
   outside interrupt handlers.  */
2524 static bool
2525 mep_interrupt_saved_reg (int r)
2527 if (!mep_interrupt_p ())
2528 return false;
2529 if (r == REGSAVE_CONTROL_TEMP
2530 || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
2531 return true;
/* An operand-less asm could touch anything non-fixed.  */
2532 if (mep_asm_without_operands_p ()
2533 && (!fixed_regs[r]
2534 || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
2535 || IVC2_ISAVED_REG (r)))
2536 return true;
2537 if (!current_function_is_leaf)
2538 /* Function calls mean we need to save $lp. */
2539 if (r == LP_REGNO || IVC2_ISAVED_REG (r))
2540 return true;
2541 if (!current_function_is_leaf || cfun->machine->doloop_tags > 0)
2542 /* The interrupt handler might use these registers for repeat blocks,
2543 or it might call a function that does so. */
2544 if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
2545 return true;
2546 if (current_function_is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
2547 return false;
2548 /* Functions we call might clobber these. */
2549 if (call_used_regs[r] && !fixed_regs[r])
2550 return true;
2551 /* Additional registers that need to be saved for IVC2. */
2552 if (IVC2_ISAVED_REG (r))
2553 return true;
2555 return false;
/* Return true if the current function must save/restore register R.
   The decision is recomputed on every call and stored in
   cfun->machine->reg_saved[R] (the MEP_SAVES_UNKNOWN cache guard is
   commented out, so the cache is write-only here).  */
2558 static bool
2559 mep_call_saves_register (int r)
2561 /* if (cfun->machine->reg_saved[r] == MEP_SAVES_UNKNOWN)*/
2563 int rv = MEP_SAVES_NO;
2565 if (cfun->machine->reg_save_slot[r])
2566 rv = MEP_SAVES_YES;
/* Profiling clobbers $lp, so it must be preserved explicitly.  */
2567 else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2568 rv = MEP_SAVES_YES;
2569 else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
2570 rv = MEP_SAVES_YES;
2571 else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
2572 rv = MEP_SAVES_YES;
2573 else if (crtl->calls_eh_return && (r == 10 || r == 11))
2574 /* We need these to have stack slots so that they can be set during
2575 unwinding. */
2576 rv = MEP_SAVES_YES;
2577 else if (mep_interrupt_saved_reg (r))
2578 rv = MEP_SAVES_YES;
2579 cfun->machine->reg_saved[r] = rv;
2581 return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
2584 /* Return true if epilogue uses register REGNO. */
/* Implement EPILOGUE_USES: return true if the epilogue reads REGNO.  */
2586 bool
2587 mep_epilogue_uses (int regno)
2589 /* Since $lp is a call-saved register, the generic code will normally
2590 mark it used in the epilogue if it needs to be saved and restored.
2591 However, when profiling is enabled, the profiling code will implicitly
2592 clobber $11. This case has to be handled specially both here and in
2593 mep_call_saves_register. */
2594 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2595 return true;
/* Only answer after reload, once the interrupt save set is final.  */
2596 /* Interrupt functions save/restore pretty much everything. */
2597 return (reload_completed && mep_interrupt_saved_reg (regno));
2600 static int
2601 mep_reg_size (int regno)
2603 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2604 return 8;
2605 return 4;
/* Return the offset between elimination register pair FROM/TO (any of
   arg pointer, frame pointer, stack pointer).  As a side effect this
   recomputes the register save-area size and the two 8-byte alignment
   fillers (regsave_filler between saves and locals, frame_filler at
   the bottom of the frame).  */
2609 mep_elimination_offset (int from, int to)
2611 int reg_save_size;
2612 int i;
2613 int frame_size = get_frame_size () + crtl->outgoing_args_size;
2614 int total_size;
/* Invalidate the reg_saved cache so mep_call_saves_register
   recomputes from scratch.  */
2616 memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));
2618 /* We don't count arg_regs_to_save in the arg pointer offset, because
2619 gcc thinks the arg pointer has moved along with the saved regs.
2620 However, we do count it when we adjust $sp in the prologue. */
2621 reg_save_size = 0;
2622 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2623 if (mep_call_saves_register (i))
2624 reg_save_size += mep_reg_size (i);
/* Keep the save area a multiple of 8 bytes.  */
2626 if (reg_save_size % 8)
2627 cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
2628 else
2629 cfun->machine->regsave_filler = 0;
2631 /* This is what our total stack adjustment looks like. */
2632 total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);
2634 if (total_size % 8)
2635 cfun->machine->frame_filler = 8 - (total_size % 8);
2636 else
2637 cfun->machine->frame_filler = 0;
2640 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
2641 return reg_save_size + cfun->machine->regsave_filler;
2643 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2644 return cfun->machine->frame_filler + frame_size;
2646 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2647 return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;
2649 gcc_unreachable ();
/* Mark insn X as frame-related and return it; shorthand used by the
   prologue emitter below.  */
2652 static rtx
2653 F (rtx x)
2655 RTX_FRAME_RELATED_P (x) = 1;
2656 return x;
2659 /* Since the prologue/epilogue code is generated after optimization,
2660 we can't rely on gcc to split constants for us. So, this code
2661 captures all the ways to add a constant to a register in one logic
2662 chunk, including optimizing away insns we just don't need. This
2663 makes the prolog/epilog code easier to follow. */
/* Emit insns computing DEST = SRC + VALUE.  If MARK_FRAME, the insns
   are marked frame-related (with an explicit REG_FRAME_RELATED_EXPR
   note in the multi-insn big-constant case, since the unwinder cannot
   follow the temporary-register sequence).  */
2664 static void
2665 add_constant (int dest, int src, int value, int mark_frame)
2667 rtx insn;
2668 int hi, lo;
2670 if (src == dest && value == 0)
2671 return;
/* Plain register copy.  */
2673 if (value == 0)
2675 insn = emit_move_insn (gen_rtx_REG (SImode, dest),
2676 gen_rtx_REG (SImode, src));
2677 if (mark_frame)
2678 RTX_FRAME_RELATED_P(insn) = 1;
2679 return;
/* Constant fits a single 16-bit-immediate add.  */
2682 if (value >= -32768 && value <= 32767)
2684 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2685 gen_rtx_REG (SImode, src),
2686 GEN_INT (value)));
2687 if (mark_frame)
2688 RTX_FRAME_RELATED_P(insn) = 1;
2689 return;
2692 /* Big constant, need to use a temp register. We use
2693 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2694 area is always small enough to directly add to). */
2696 hi = trunc_int_for_mode (value & 0xffff0000, SImode);
2697 lo = value & 0xffff;
2699 insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2700 GEN_INT (hi));
2702 if (lo)
2704 insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2705 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2706 GEN_INT (lo)));
2709 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2710 gen_rtx_REG (SImode, src),
2711 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
2712 if (mark_frame)
2714 RTX_FRAME_RELATED_P(insn) = 1;
2715 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2716 gen_rtx_SET (SImode,
2717 gen_rtx_REG (SImode, dest),
2718 gen_rtx_PLUS (SImode,
2719 gen_rtx_REG (SImode, dest),
2720 GEN_INT (value))));
/* Return true if $sp is mentioned anywhere in the current function's
   insn stream, including pending sequences.  */
2724 static bool
2725 mep_function_uses_sp (void)
2727 rtx insn;
2728 struct sequence_stack *seq;
2729 rtx sp = gen_rtx_REG (SImode, SP_REGNO);
/* Walk down to the outermost saved sequence; note the deliberately
   empty loop body (the trailing semicolon).  */
2731 insn = get_insns ();
2732 for (seq = crtl->emit.sequence_stack;
2733 seq;
2734 insn = seq->first, seq = seq->next);
2736 while (insn)
2738 if (mep_mentioned_p (insn, sp, 0))
2739 return true;
2740 insn = NEXT_INSN (insn);
2742 return false;
2745 /* Move SRC to DEST. Mark the move as being potentially dead if
2746 MAYBE_DEAD_P. */
/* Emit a move of SRC to DEST and return the insn.  MAYBE_DEAD_P was
   meant to attach a REG_MAYBE_DEAD note, but that code is disabled
   (#if 0), so the flag currently has no effect.  */
2748 static rtx
2749 maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2751 rtx insn = emit_move_insn (dest, src);
2752 #if 0
2753 if (maybe_dead_p)
2754 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2755 #endif
2756 return insn;
2759 /* Used for interrupt functions, which can't assume that $tp and $gp
2760 contain the correct pointers. */
/* (Re)load pointer register REGNO with the address of SYMBOL using a
   topsym/botsym (hi/lo) pair.  Used by interrupt prologues to rebuild
   $gp/$tp; skipped when the register is unused in a leaf function.  */
2762 static void
2763 mep_reload_pointer (int regno, const char *symbol)
2765 rtx reg, sym;
2767 if (!df_regs_ever_live_p(regno) && current_function_is_leaf)
2768 return;
2770 reg = gen_rtx_REG (SImode, regno);
2771 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2772 emit_insn (gen_movsi_topsym_s (reg, sym));
2773 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
/* Expand the function prologue: adjust $sp, lay out and populate the
   register save area, establish the frame pointer if needed, and in
   interrupt handlers reload $gp/$tp from their defining symbols.  */
2776 void
2777 mep_expand_prologue (void)
2779 int i, rss, sp_offset = 0;
2780 int reg_save_size;
2781 int frame_size;
/* NOTE(review): frame_size is read here before it is ever assigned
   (it is only set from mep_elimination_offset below), so this
   initializer reads an uninitialized variable -- undefined behavior.
   really_need_stack_frame is also written but never read again in
   this function.  Looks like the initializer should be 0, or the
   assignment belongs after frame_size is computed -- confirm.  */
2782 int really_need_stack_frame = frame_size;
2783 int di_ofs = 0;
2785 /* We must not allow register renaming in interrupt functions,
2786 because that invalidates the correctness of the set of call-used
2787 registers we're going to save/restore. */
2788 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2790 if (mep_disinterrupt_p ())
2791 emit_insn (gen_mep_disable_int ());
2793 cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;
2795 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2796 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2798 /* Assign save slots for any register not already saved. DImode
2799 registers go at the end of the reg save area; the rest go at the
2800 beginning. This is for alignment purposes. */
2801 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2802 if (mep_call_saves_register(i))
2804 int regsize = mep_reg_size (i);
2806 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2807 || mep_reg_set_in_function (i))
2808 really_need_stack_frame = 1;
2810 if (cfun->machine->reg_save_slot[i])
2811 continue;
2813 if (regsize < 8)
2815 cfun->machine->reg_save_size += regsize;
2816 cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
2818 else
2820 cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
2821 di_ofs += 8;
/* Fold the frame adjustment into the save-area adjustment when the
   total still fits a short (< 128) immediate.  */
2825 sp_offset = reg_save_size;
2826 if (sp_offset + frame_size < 128)
2827 sp_offset += frame_size ;
2829 add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);
/* Store each saved register into its slot.  */
2831 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2832 if (mep_call_saves_register(i))
2834 rtx mem;
2835 bool maybe_dead_p;
2836 enum machine_mode rmode;
2838 rss = cfun->machine->reg_save_slot[i];
2840 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2841 && (!mep_reg_set_in_function (i)
2842 && !mep_interrupt_p ()))
2843 continue;
2845 if (mep_reg_size (i) == 8)
2846 rmode = DImode;
2847 else
2848 rmode = SImode;
2850 /* If there is a pseudo associated with this register's initial value,
2851 reload might have already spilt it to the stack slot suggested by
2852 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2853 deleted as dead. */
2854 mem = gen_rtx_MEM (rmode,
2855 plus_constant (stack_pointer_rtx, sp_offset - rss));
2856 maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));
2858 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2859 F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
2860 else if (rmode == DImode)
/* 64-bit control register: move it out in two 32-bit halves via
   the control temporaries, honoring endianness.  */
2862 rtx insn;
2863 int be = TARGET_BIG_ENDIAN ? 4 : 0;
2865 mem = gen_rtx_MEM (SImode,
2866 plus_constant (stack_pointer_rtx, sp_offset - rss + be));
2868 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2869 gen_rtx_REG (SImode, i),
2870 maybe_dead_p);
2871 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2872 gen_rtx_ZERO_EXTRACT (SImode,
2873 gen_rtx_REG (DImode, i),
2874 GEN_INT (32),
2875 GEN_INT (32)),
2876 maybe_dead_p);
2877 insn = maybe_dead_move (mem,
2878 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2879 maybe_dead_p);
2880 RTX_FRAME_RELATED_P (insn) = 1;
2882 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2883 gen_rtx_SET (VOIDmode,
2884 copy_rtx (mem),
2885 gen_rtx_REG (rmode, i)));
2886 mem = gen_rtx_MEM (SImode,
2887 plus_constant (stack_pointer_rtx, sp_offset - rss + (4-be)));
2888 insn = maybe_dead_move (mem,
2889 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2890 maybe_dead_p);
2892 else
/* Other control registers must be staged through a core register.  */
2894 rtx insn;
2895 maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2896 gen_rtx_REG (rmode, i),
2897 maybe_dead_p);
2898 insn = maybe_dead_move (mem,
2899 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2900 maybe_dead_p);
2901 RTX_FRAME_RELATED_P (insn) = 1;
2903 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2904 gen_rtx_SET (VOIDmode,
2905 copy_rtx (mem),
2906 gen_rtx_REG (rmode, i)));
2910 if (frame_pointer_needed)
2912 /* We've already adjusted down by sp_offset. Total $sp change
2913 is reg_save_size + frame_size. We want a net change here of
2914 just reg_save_size. */
2915 add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
2918 add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);
2920 if (mep_interrupt_p ())
2922 mep_reload_pointer(GP_REGNO, "__sdabase");
2923 mep_reload_pointer(TP_REGNO, "__tpbase");
/* Emit a human-readable map of the stack frame as asm comments at the
   head of the function, and select the printed name of the FP
   register ($fp vs $8).  With debug info disabled only a one-line
   summary is printed; otherwise each save slot, filler and region is
   listed in memory order.  */
2927 static void
2928 mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2930 int local = hwi_local;
2931 int frame_size = local + crtl->outgoing_args_size;
2932 int reg_save_size;
2933 int ffill;
2934 int i, sp, skip;
2935 int sp_offset;
2936 int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
2938 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2939 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2940 sp_offset = reg_save_size + frame_size;
2942 ffill = cfun->machine->frame_filler;
2944 if (cfun->machine->mep_frame_pointer_needed)
2945 reg_names[FP_REGNO] = "$fp";
2946 else
2947 reg_names[FP_REGNO] = "$8";
2949 if (sp_offset == 0)
2950 return;
2952 if (debug_info_level == DINFO_LEVEL_NONE)
2954 fprintf (file, "\t# frame: %d", sp_offset);
2955 if (reg_save_size)
2956 fprintf (file, " %d regs", reg_save_size);
2957 if (local)
2958 fprintf (file, " %d locals", local);
2959 if (crtl->outgoing_args_size)
2960 fprintf (file, " %d args", crtl->outgoing_args_size);
2961 fprintf (file, "\n");
2962 return;
2965 fprintf (file, "\t#\n");
2966 fprintf (file, "\t# Initial Frame Information:\n");
2967 if (sp_offset || !frame_pointer_needed)
2968 fprintf (file, "\t# Entry ---------- 0\n");
2970 /* Sort registers by save slots, so they're printed in the order
2971 they appear in memory, not the order they're saved in. */
/* Simple O(n^2) selection sort; FIRST_PSEUDO_REGISTER is small and
   this runs once per function, so that is fine.  */
2972 for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
2973 slot_map[si] = si;
2974 for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
2975 for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
2976 if (cfun->machine->reg_save_slot[slot_map[si]]
2977 > cfun->machine->reg_save_slot[slot_map[sj]])
2979 int t = slot_map[si];
2980 slot_map[si] = slot_map[sj];
2981 slot_map[sj] = t;
2984 sp = 0;
2985 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2987 int rsize;
2988 int r = slot_map[i];
2989 int rss = cfun->machine->reg_save_slot[r];
2991 if (!rss)
2992 continue;
2994 rsize = mep_reg_size(r);
2995 skip = rss - (sp+rsize);
2996 if (skip)
2997 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2998 fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2999 rsize, reg_names[r], sp_offset - rss);
3000 sp = rss;
3003 skip = reg_save_size - sp;
3004 if (skip)
3005 fprintf (file, "\t# %3d bytes for alignment\n", skip);
3007 if (frame_pointer_needed)
3008 fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
3009 if (local)
3010 fprintf (file, "\t# %3d bytes for local vars\n", local);
3011 if (ffill)
3012 fprintf (file, "\t# %3d bytes for alignment\n", ffill);
3013 if (crtl->outgoing_args_size)
3014 fprintf (file, "\t# %3d bytes for outgoing args\n",
3015 crtl->outgoing_args_size);
3016 fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
3017 fprintf (file, "\t#\n");
/* Flags used by the epilogue variants below to alter
   mep_expand_epilogue's behavior: suppress the $lp restore for
   eh_return, and mark that a sibcall epilogue is being expanded.  */
3021 static int mep_prevent_lp_restore = 0;
3022 static int mep_sibcall_epilogue = 0;
/* Expand the function epilogue: restore saved registers (deferring
   $lp so we can return via an indirect jump through the temp), pop
   the frame, and emit the appropriate return (normal, interrupt reti,
   or eh_return).  Behavior is modified by mep_prevent_lp_restore and
   mep_sibcall_epilogue above.  */
3024 void
3025 mep_expand_epilogue (void)
3027 int i, sp_offset = 0;
3028 int reg_save_size = 0;
3029 int frame_size;
3030 int lp_temp = LP_REGNO, lp_slot = -1;
/* NOTE(review): really_need_stack_frame is computed here and below
   but never read in this function -- appears vestigial.  */
3031 int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
3032 int interrupt_handler = mep_interrupt_p ();
3034 if (profile_arc_flag == 2)
3035 emit_insn (gen_mep_bb_trace_ret ());
3037 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
3038 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
3040 /* All save slots are set by mep_expand_prologue. */
3041 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
3042 if (mep_call_saves_register(i))
3044 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
3045 || mep_reg_set_in_function (i))
3046 really_need_stack_frame = 1;
3049 if (frame_pointer_needed)
3051 /* If we have a frame pointer, we won't have a reliable stack
3052 pointer (alloca, you know), so rebase SP from FP */
3053 emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
3054 gen_rtx_REG (SImode, FP_REGNO))3;
3055 sp_offset = reg_save_size;
3057 else
3059 /* SP is right under our local variable space. Adjust it if
3060 needed. */
3061 sp_offset = reg_save_size + frame_size;
3062 if (sp_offset >= 128)
3064 add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
3065 sp_offset -= frame_size;
3069 /* This is backwards so that we restore the control and coprocessor
3070 registers before the temporary registers we use to restore
3071 them. */
3072 for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
3073 if (mep_call_saves_register (i))
3075 enum machine_mode rmode;
3076 int rss = cfun->machine->reg_save_slot[i];
3078 if (mep_reg_size (i) == 8)
3079 rmode = DImode;
3080 else
3081 rmode = SImode;
3083 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
3084 && !(mep_reg_set_in_function (i) || interrupt_handler))
3085 continue;
3086 if (mep_prevent_lp_restore && i == LP_REGNO)
3087 continue;
/* $10/$11 carry the EH stack adjustment and handler; don't clobber
   them in a normal epilogue of a function that may eh_return.  */
3088 if (!mep_prevent_lp_restore
3089 && !interrupt_handler
3090 && (i == 10 || i == 11))
3091 continue;
3093 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
3094 emit_move_insn (gen_rtx_REG (rmode, i),
3095 gen_rtx_MEM (rmode,
3096 plus_constant (stack_pointer_rtx,
3097 sp_offset-rss)));
3098 else
3100 if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
3101 /* Defer this one so we can jump indirect rather than
3102 copying the RA to $lp and "ret". EH epilogues
3103 automatically skip this anyway. */
3104 lp_slot = sp_offset-rss;
3105 else
3107 emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
3108 gen_rtx_MEM (rmode,
3109 plus_constant (stack_pointer_rtx,
3110 sp_offset-rss)));
3111 emit_move_insn (gen_rtx_REG (rmode, i),
3112 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
3116 if (lp_slot != -1)
3118 /* Restore this one last so we know it will be in the temp
3119 register when we return by jumping indirectly via the temp. */
3120 emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
3121 gen_rtx_MEM (SImode,
3122 plus_constant (stack_pointer_rtx,
3123 lp_slot)));
3124 lp_temp = REGSAVE_CONTROL_TEMP;
3128 add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);
3130 if (crtl->calls_eh_return && mep_prevent_lp_restore)
3131 emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
3132 gen_rtx_REG (SImode, SP_REGNO),
3133 cfun->machine->eh_stack_adjust));
/* A sibcall epilogue ends at the call, not at a return.  */
3135 if (mep_sibcall_epilogue)
3136 return;
3138 if (mep_disinterrupt_p ())
3139 emit_insn (gen_mep_enable_int ());
3141 if (mep_prevent_lp_restore)
3143 emit_jump_insn (gen_eh_return_internal ());
3144 emit_barrier ();
3146 else if (interrupt_handler)
3147 emit_jump_insn (gen_mep_reti ());
3148 else
3149 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
/* Expand the eh_return pattern: force the handler address in
   OPERANDS[0] into $lp if it is not already there, then emit the EH
   epilogue.  */
3152 void
3153 mep_expand_eh_return (rtx *operands)
3155 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3157 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3158 emit_move_insn (ra, operands[0]);
3159 operands[0] = ra;
3162 emit_insn (gen_eh_epilogue (operands[0]));
/* Emit an epilogue suitable for exception return: the stack
   adjustment comes from register 0, and the $lp restore is suppressed
   (the handler address is already in $lp).  */
3165 void
3166 mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
3168 cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
3169 mep_prevent_lp_restore = 1;
3170 mep_expand_epilogue ();
3171 mep_prevent_lp_restore = 0;
/* Emit the epilogue used before a sibling call: registers are
   restored and the frame popped, but no return insn is emitted.  */
3174 void
3175 mep_expand_sibcall_epilogue (void)
3177 mep_sibcall_epilogue = 1;
3178 mep_expand_epilogue ();
3179 mep_sibcall_epilogue = 0;
/* Implement TARGET_FUNCTION_OK_FOR_SIBCALL.  Indirect calls (DECL is
   NULL), callees whose section tag is 'f' (per mep_section_tag), and
   calls from interrupt/disinterrupt functions cannot be sibcalled.  */
3182 static bool
3183 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3185 if (decl == NULL)
3186 return false;
3188 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3189 return false;
3191 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3192 if (mep_interrupt_p () || mep_disinterrupt_p ())
3193 return false;
3195 return true;
/* Return the register carrying the eh_return stack adjustment ($10;
   cf. the r == 10 || r == 11 handling in mep_call_saves_register).  */
3199 mep_return_stackadj_rtx (void)
3201 return gen_rtx_REG (SImode, 10);
/* Return the register carrying the eh_return handler address ($lp).  */
3205 mep_return_handler_rtx (void)
3207 return gen_rtx_REG (SImode, LP_REGNO);
/* Emit the per-function profiling stub into FILE.  The sequence
   spills $0 and $lp (via $0) to a temporary 8-byte stack block,
   calls __mep_mcount, then restores both and pops the block.  */
void
mep_function_profiler (FILE *file)
{
  static const char * const mcount_seq[] = {
    "\t# mep function profiler\n",
    "\tadd\t$sp, -8\n",
    "\tsw\t$0, ($sp)\n",
    "\tldc\t$0, $lp\n",
    "\tsw\t$0, 4($sp)\n",
    "\tbsr\t__mep_mcount\n",
    "\tlw\t$0, 4($sp)\n",
    "\tstc\t$0, $lp\n",
    "\tlw\t$0, ($sp)\n",
    "\tadd\t$sp, 8\n\n",
  };
  size_t i;

  /* Always right at the beginning of the function.  */
  for (i = 0; i < sizeof mcount_seq / sizeof mcount_seq[0]; i++)
    fputs (mcount_seq[i], file);
}
/* Emit the block-profiling return stub to asm_out_file: spill $0 and
   $lp, call __bb_trace_ret, restore.  Returns "" so it can serve
   directly as an insn output template.  */
3226 const char *
3227 mep_emit_bb_trace_ret (void)
3229 fprintf (asm_out_file, "\t# end of block profiling\n");
3230 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3231 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3232 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3233 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3234 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3235 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3236 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3237 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3238 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3239 return "";
3242 #undef SAVE
3243 #undef RESTORE
3245 /* Operand Printing. */
/* Implement PRINT_OPERAND_ADDRESS: unwrap a MEM to its address (asm
   memory operands can arrive as the MEM itself), insist on a REG
   otherwise, and defer to mep_print_operand.  */
3247 void
3248 mep_print_operand_address (FILE *stream, rtx address)
3250 if (GET_CODE (address) == MEM)
3251 address = XEXP (address, 0);
3252 else
3253 /* cf: gcc.dg/asm-4.c. */
3254 gcc_assert (GET_CODE (address) == REG);
3256 mep_print_operand (stream, address, 0);
/* Operand-printing conversion table, searched by mep_print_operand.
   CODE is the print modifier ('b', 'I', ... or 0 for none); PATTERN
   is matched against the string produced by encode_pattern; FORMAT is
   then emitted with each digit replaced by printing the sub-rtx
   captured in patternr[] at that index, and '\\' escaping a literal
   character.  */
3259 static struct
3261 char code;
3262 const char *pattern;
3263 const char *format;
3265 const conversions[] =
3267 { 0, "r", "0" },
3268 { 0, "m+ri", "3(2)" },
3269 { 0, "mr", "(1)" },
3270 { 0, "ms", "(1)" },
3271 { 0, "mLrs", "%lo(3)(2)" },
3272 { 0, "mLr+si", "%lo(4+5)(2)" },
3273 { 0, "m+ru2s", "%tpoff(5)(2)" },
3274 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3275 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3276 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3277 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3278 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3279 { 0, "mi", "(1)" },
3280 { 0, "m+si", "(2+3)" },
3281 { 0, "m+li", "(2+3)" },
3282 { 0, "i", "0" },
3283 { 0, "s", "0" },
3284 { 0, "+si", "1+2" },
3285 { 0, "+u2si", "%tpoff(3+4)" },
3286 { 0, "+u3si", "%sdaoff(3+4)" },
3287 { 0, "l", "0" },
3288 { 'b', "i", "0" },
3289 { 'B', "i", "0" },
3290 { 'U', "i", "0" },
3291 { 'h', "i", "0" },
3292 { 'h', "Hs", "%hi(1)" },
3293 { 'I', "i", "0" },
3294 { 'I', "u2s", "%tpoff(2)" },
3295 { 'I', "u3s", "%sdaoff(2)" },
3296 { 'I', "+u2si", "%tpoff(3+4)" },
3297 { 'I', "+u3si", "%sdaoff(3+4)" },
3298 { 'J', "i", "0" },
3299 { 'P', "mr", "(1\\+),\\0" },
3300 { 'x', "i", "0" },
3301 { 0, 0, 0 }
3304 static int
3305 unique_bit_in (HOST_WIDE_INT i)
3307 switch (i & 0xff)
3309 case 0x01: case 0xfe: return 0;
3310 case 0x02: case 0xfd: return 1;
3311 case 0x04: case 0xfb: return 2;
3312 case 0x08: case 0xf7: return 3;
3313 case 0x10: case 0x7f: return 4;
3314 case 0x20: case 0xbf: return 5;
3315 case 0x40: case 0xdf: return 6;
3316 case 0x80: case 0xef: return 7;
3317 default:
3318 gcc_unreachable ();
3322 static int
3323 bit_size_for_clip (HOST_WIDE_INT i)
3325 int rv;
3327 for (rv = 0; rv < 31; rv ++)
3328 if (((HOST_WIDE_INT) 1 << rv) > i)
3329 return rv + 1;
3330 gcc_unreachable ();
3333 /* Print an operand to an assembler instruction. */
/* Implement PRINT_OPERAND: print operand X to FILE, honoring the
   print modifier CODE.  The '<', 'L' and 'M' codes are handled
   directly (they describe CR<->CR move mnemonics and bit-op names);
   everything else is matched against the conversions[] table using
   encode_pattern and patternr[].  */
3335 void
3336 mep_print_operand (FILE *file, rtx x, int code)
3338 int i, j;
3339 const char *real_name;
3341 if (code == '<')
3343 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3344 we're using, then skip over the "mep_" part of its name. */
3345 const struct cgen_insn *insn;
3347 if (mep_get_move_insn (mep_cmov, &insn))
3348 fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
3349 else
3350 mep_intrinsic_unavailable (mep_cmov);
3351 return;
/* 'L': print the bit-op suffix (clr/set/not) for AND/IOR/XOR.  */
3353 if (code == 'L')
3355 switch (GET_CODE (x))
3357 case AND:
3358 fputs ("clr", file);
3359 return;
3360 case IOR:
3361 fputs ("set", file);
3362 return;
3363 case XOR:
3364 fputs ("not", file);
3365 return;
3366 default:
3367 output_operand_lossage ("invalid %%L code");
3370 if (code == 'M')
3372 /* Print the second operand of a CR <- CR move. If we're using
3373 a two-operand instruction (i.e., a real cmov), then just print
3374 the operand normally. If we're using a "reg, reg, immediate"
3375 instruction such as caddi3, print the operand followed by a
3376 zero field. If we're using a three-register instruction,
3377 print the operand twice. */
3378 const struct cgen_insn *insn;
3380 mep_print_operand (file, x, 0);
3381 if (mep_get_move_insn (mep_cmov, &insn)
3382 && insn_data[insn->icode].n_operands == 3)
3384 fputs (", ", file);
3385 if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
3386 mep_print_operand (file, x, 0);
3387 else
3388 mep_print_operand (file, const0_rtx, 0);
3390 return;
/* Table-driven path: find the entry matching CODE and the encoded
   shape of X, then emit its format string.  */
3393 encode_pattern (x);
3394 for (i = 0; conversions[i].pattern; i++)
3395 if (conversions[i].code == code
3396 && strcmp(conversions[i].pattern, pattern) == 0)
3398 for (j = 0; conversions[i].format[j]; j++)
3399 if (conversions[i].format[j] == '\\')
3401 fputc (conversions[i].format[j+1], file);
3402 j++;
3404 else if (ISDIGIT(conversions[i].format[j]))
3406 rtx r = patternr[conversions[i].format[j] - '0'];
3407 switch (GET_CODE (r))
3409 case REG:
3410 fprintf (file, "%s", reg_names [REGNO (r)]);
3411 break;
3412 case CONST_INT:
3413 switch (code)
3415 case 'b':
3416 fprintf (file, "%d", unique_bit_in (INTVAL (r)));
3417 break;
3418 case 'B':
3419 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
3420 break;
3421 case 'h':
3422 fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
3423 break;
3424 case 'U':
3425 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
3426 break;
3427 case 'J':
3428 fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
3429 break;
3430 case 'x':
3431 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3432 && !(INTVAL (r) & 0xff))
3433 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
3434 else
3435 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3436 break;
3437 case 'I':
3438 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3439 && conversions[i].format[j+1] == 0)
3441 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
3442 fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
3444 else
3445 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3446 break;
3447 default:
3448 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3449 break;
3451 break;
3452 case CONST_DOUBLE:
3453 fprintf(file, "[const_double 0x%lx]",
3454 (unsigned long) CONST_DOUBLE_HIGH(r));
3455 break;
3456 case SYMBOL_REF:
3457 real_name = TARGET_STRIP_NAME_ENCODING (XSTR (r, 0));
3458 assemble_name (file, real_name);
3459 break;
3460 case LABEL_REF:
3461 output_asm_label (r);
3462 break;
3463 default:
3464 fprintf (stderr, "don't know how to print this operand:");
3465 debug_rtx (r);
3466 gcc_unreachable ();
3469 else
/* Literal format character; '+' is suppressed before a negative
   constant (whose own minus sign serves as the separator).  */
3471 if (conversions[i].format[j] == '+'
3472 && (!code || code == 'I')
3473 && ISDIGIT (conversions[i].format[j+1])
3474 && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
3475 && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
3476 continue;
3477 fputc(conversions[i].format[j], file);
3479 break;
3481 if (!conversions[i].pattern)
3483 error ("unconvertible operand %c %qs", code?code:'-', pattern);
3484 debug_rtx(x);
3487 return;
/* FINAL_PRESCAN_INSN hook: prefix non-core-slot jump insns (marked
   with BImode; see comment below) with '+', the VLIW bundling marker
   understood by the assembler.  */
3490 void
3491 mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
3492 int noperands ATTRIBUTE_UNUSED)
3494 /* Despite the fact that MeP is perfectly capable of branching and
3495 doing something else in the same bundle, gcc does jump
3496 optimization *after* scheduling, so we cannot trust the bundling
3497 flags on jump instructions. */
3498 if (GET_MODE (insn) == BImode
3499 && get_attr_slots (insn) != SLOTS_CORE)
3500 fputc ('+', asm_out_file);
3503 /* Function args in registers. */
/* TARGET_SETUP_INCOMING_VARARGS hook: record how many of the four
   argument registers remain unnamed (so __builtin_saveregs can dump
   them) and report the pretend-argument size.  */
3505 static void
3506 mep_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
3507 enum machine_mode mode ATTRIBUTE_UNUSED,
3508 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3509 int second_time ATTRIBUTE_UNUSED)
3511 int nsave = 4 - (cum->nregs + 1);
3513 if (nsave > 0)
3514 cfun->machine->arg_regs_to_save = nsave;
/* NOTE(review): nsave can be negative when all argument registers
   are already taken, which makes *pretend_size negative here;
   presumably callers never rely on that, but a clamp to 0 may be
   warranted -- confirm.  */
3515 *pretend_size = nsave * 4;
3518 static int
3519 bytesize (const_tree type, enum machine_mode mode)
3521 if (mode == BLKmode)
3522 return int_size_in_bytes (type);
3523 return GET_MODE_SIZE (mode);
/* Expand __builtin_saveregs: dump the remaining unnamed argument
   registers into a stack buffer and return its address.  For IVC2,
   the matching DImode coprocessor argument registers (49 onward) are
   stored after the core words, hence 12 bytes per register.  */
3526 static rtx
3527 mep_expand_builtin_saveregs (void)
3529 int bufsize, i, ns;
3530 rtx regbuf;
3532 ns = cfun->machine->arg_regs_to_save;
3533 bufsize = ns * (TARGET_IVC2 ? 12 : 4);
3534 regbuf = assign_stack_local (SImode, bufsize, 32);
3536 move_block_from_reg (5-ns, regbuf, ns);
3538 if (TARGET_IVC2)
3540 rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
3541 int ofs = 4 * ns;
3543 for (i=0; i<ns; i++)
3545 int rn = (4-ns) + i + 49;
3546 rtx ptr;
3548 ptr = offset_address (tmp, GEN_INT (ofs), 2);
3549 emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
3550 ofs += 8;
3553 return XEXP (regbuf, 0);
/* True if tree type T is a vector type (used for IVC2 vararg layout).  */
3556 #define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
/* Implement TARGET_BUILD_BUILTIN_VA_LIST: the va_list is a record of
   four pointers -- the next general-register slot, its limit, the
   next coprocessor slot, and the next stack argument.  */
3558 static tree
3559 mep_build_builtin_va_list (void)
3561 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3562 tree record;
3565 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3567 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3568 get_identifier ("__va_next_gp"), ptr_type_node);
3569 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3570 get_identifier ("__va_next_gp_limit"),
3571 ptr_type_node);
3572 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3573 ptr_type_node);
3574 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3575 ptr_type_node);
3577 DECL_FIELD_CONTEXT (f_next_gp) = record;
3578 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3579 DECL_FIELD_CONTEXT (f_next_cop) = record;
3580 DECL_FIELD_CONTEXT (f_next_stack) = record;
3582 TYPE_FIELDS (record) = f_next_gp;
3583 TREE_CHAIN (f_next_gp) = f_next_gp_limit;
3584 TREE_CHAIN (f_next_gp_limit) = f_next_cop;
3585 TREE_CHAIN (f_next_cop) = f_next_stack;
3587 layout_type (record);
3589 return record;
/* Implement TARGET_EXPAND_BUILTIN_VA_START: initialize the four
   va_list fields.  next_gp points at the saveregs buffer, its limit
   is 4*ns bytes beyond, next_cop starts at that limit (the coprocessor
   words follow the core words -- see mep_expand_builtin_saveregs),
   and next_stack is NEXTARG.  */
3592 static void
3593 mep_expand_va_start (tree valist, rtx nextarg)
3595 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3596 tree next_gp, next_gp_limit, next_cop, next_stack;
3597 tree t, u;
3598 int ns;
3600 ns = cfun->machine->arg_regs_to_save;
3602 f_next_gp = TYPE_FIELDS (va_list_type_node);
3603 f_next_gp_limit = TREE_CHAIN (f_next_gp);
3604 f_next_cop = TREE_CHAIN (f_next_gp_limit);
3605 f_next_stack = TREE_CHAIN (f_next_cop);
3607 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3608 NULL_TREE);
3609 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3610 valist, f_next_gp_limit, NULL_TREE);
3611 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3612 NULL_TREE);
3613 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3614 valist, f_next_stack, NULL_TREE);
3616 /* va_list.next_gp = expand_builtin_saveregs (); */
3617 u = make_tree (sizetype, expand_builtin_saveregs ());
3618 u = fold_convert (ptr_type_node, u);
3619 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3620 TREE_SIDE_EFFECTS (t) = 1;
3621 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3623 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3624 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
3625 size_int (4 * ns));
3626 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3627 TREE_SIDE_EFFECTS (t) = 1;
3628 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3630 /* va_list.next_cop = va_list.next_gp_limit; */
3631 t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3632 TREE_SIDE_EFFECTS (t) = 1;
3633 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3635 /* va_list.next_stack = nextarg; */
3636 u = make_tree (ptr_type_node, nextarg);
3637 t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3638 TREE_SIDE_EFFECTS (t) = 1;
3639 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Emit gimple that fetches the
   next variadic argument of TYPE from VALIST, consuming either a GPR
   save slot, an IVC2 coprocessor save slot (for vectors), or stack
   space.  Returns the dereferenced value; large or unsized arguments
   are fetched by reference (a pointer was passed instead).  */
static tree
mep_gimplify_va_arg_expr (tree valist, tree type,
			  tree *pre_p, tree *post_p ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size, rsize;
  bool by_reference, ivc2_vec;
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree label_sover, label_selse;
  tree tmp, res_addr;

  /* IVC2 vectors up to 8 bytes travel in coprocessor registers.  */
  ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);

  size = int_size_in_bytes (type);
  by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);

  if (by_reference)
    {
      /* The caller passed a pointer; fetch that instead.  */
      type = build_pointer_type (type);
      size = 4;
    }
  /* Stack slots are rounded up to a whole word.  */
  rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;

  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = TREE_CHAIN (f_next_gp);
  f_next_cop = TREE_CHAIN (f_next_gp_limit);
  f_next_stack = TREE_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* if f_next_gp < f_next_gp_limit
       IF (VECTOR_P && IVC2)
	 val = *f_next_cop;
       ELSE
	 val = *f_next_gp;
       f_next_gp += 4;
       f_next_cop += 8;
     else
       label_selse:
       val = *f_next_stack;
       f_next_stack += rsize;
     label_sover:
   */

  label_sover = create_artificial_label (UNKNOWN_LOCATION);
  label_selse = create_artificial_label (UNKNOWN_LOCATION);
  res_addr = create_tmp_var (ptr_type_node, NULL);

  /* Register save area exhausted?  Jump to the stack case.  */
  tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
		unshare_expr (next_gp_limit));
  tmp = build3 (COND_EXPR, void_type_node, tmp,
		build1 (GOTO_EXPR, void_type_node,
			unshare_expr (label_selse)),
		NULL_TREE);
  gimplify_and_add (tmp, pre_p);

  if (ivc2_vec)
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
      gimplify_and_add (tmp, pre_p);
    }
  else
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
      gimplify_and_add (tmp, pre_p);
    }

  /* Both cursors advance together: 4 bytes of GPR save area, 8 bytes
     of coprocessor save area per argument.  */
  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
		unshare_expr (next_gp), size_int (4));
  gimplify_assign (unshare_expr (next_gp), tmp, pre_p);

  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
		unshare_expr (next_cop), size_int (8));
  gimplify_assign (unshare_expr (next_cop), tmp, pre_p);

  tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
		unshare_expr (next_stack), size_int (rsize));
  gimplify_assign (unshare_expr (next_stack), tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  res_addr = fold_convert (build_pointer_type (type), res_addr);

  /* One extra dereference when the value itself was passed by
     reference.  */
  if (by_reference)
    res_addr = build_va_arg_indirect_ref (res_addr);

  return build_va_arg_indirect_ref (res_addr);
}
3752 void
3753 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3754 rtx libname ATTRIBUTE_UNUSED,
3755 tree fndecl ATTRIBUTE_UNUSED)
3757 pcum->nregs = 0;
3759 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3760 pcum->vliw = 1;
3761 else
3762 pcum->vliw = 0;
3766 mep_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
3767 tree type ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
3769 /* VOIDmode is a signal for the backend to pass data to the call
3770 expander via the second operand to the call pattern. We use
3771 this to determine whether to use "jsr" or "jsrv". */
3772 if (mode == VOIDmode)
3773 return GEN_INT (cum.vliw);
3775 /* If we havn't run out of argument registers, return the next. */
3776 if (cum.nregs < 4)
3778 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3779 return gen_rtx_REG (mode, cum.nregs + 49);
3780 else
3781 return gen_rtx_REG (mode, cum.nregs + 1);
3784 /* Otherwise the argument goes on the stack. */
3785 return NULL_RTX;
3788 static bool
3789 mep_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
3790 enum machine_mode mode,
3791 const_tree type,
3792 bool named ATTRIBUTE_UNUSED)
3794 int size = bytesize (type, mode);
3795 if (type && TARGET_IVC2 && cum->nregs < 4 && VECTOR_TYPE_P (type))
3796 return size <= 0 || size > 8;
3797 return size <= 0 || size > 4;
3800 void
3801 mep_arg_advance (CUMULATIVE_ARGS *pcum,
3802 enum machine_mode mode ATTRIBUTE_UNUSED,
3803 tree type ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
3805 pcum->nregs += 1;
3808 bool
3809 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3811 int size = bytesize (type, BLKmode);
3812 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3813 return size >= 0 && size <= 8 ? 0 : 1;
3814 return size >= 0 && size <= 4 ? 0 : 1;
/* Implement TARGET_NARROW_VOLATILE_BITFIELD.  Always narrow volatile
   bit-field accesses to the field's declared type.  (The previous body
   contained an unreachable "return false;" after "return true;" —
   dead code removed.)  */
static bool
mep_narrow_volatile_bitfield (void)
{
  return true;
}
3824 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3827 mep_function_value (tree type, tree func ATTRIBUTE_UNUSED)
3829 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3830 return gen_rtx_REG (TYPE_MODE (type), 48);
3831 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
/* Implement LIBCALL_VALUE, using the same rules as mep_function_value.
   Libcalls never return vectors, so the IVC2 special case does not
   apply here.  */

rtx
mep_libcall_value (enum machine_mode mode)
{
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
/* Handle pipeline hazards.  */

/* Opcode classes that participate in the stc/fsft and stc/ret hazards
   handled by mep_asm_output_opcode.  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
/* Printable mnemonics, indexed by op_num, for the hazard comment.  */
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* Class of the most recently emitted opcode; op_none (0) initially.  */
static int prev_opcode = 0;
/* This isn't as optimal as it could be, because we don't know what
   control register the STC opcode is storing in.  We only need to add
   the nop if it's the relevant register, but we add it for irrelevant
   registers also.  */

/* Called for each opcode as it is output; inserts a "nop" between an
   stc and a following fsft or ret to avoid a pipeline hazard.  PTR
   points at the start of the mnemonic text.  */
void
mep_asm_output_opcode (FILE *file, const char *ptr)
{
  int this_opcode = op_none;
  const char *hazard = 0;

  /* Classify the mnemonic; ISGRAPH on the following character rejects
     longer mnemonics that merely share the prefix.  */
  switch (*ptr)
    {
    case 'f':
      if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
	this_opcode = op_fsft;
      break;
    case 'r':
      if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
	this_opcode = op_ret;
      break;
    case 's':
      if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
	this_opcode = op_stc;
      break;
    }

  /* The two hazardous pairs: stc;fsft and stc;ret.  */
  if (prev_opcode == op_stc && this_opcode == op_fsft)
    hazard = "nop";
  if (prev_opcode == op_stc && this_opcode == op_ret)
    hazard = "nop";

  if (hazard)
    fprintf(file, "%s\t# %s-%s hazard\n\t",
	    hazard, opnames[prev_opcode], opnames[this_opcode]);

  prev_opcode = this_opcode;
}
/* Handle attributes.  */

/* Attribute handler for "based" and "tiny".  Rejects the attribute on
   anything but variables, pointer types and type decls; additionally
   rejects auto variables and pointed-to types.  Sets *NO_ADD to
   suppress attaching the attribute on rejection.  */
static tree
mep_validate_based_tiny (tree *node, tree name, tree args,
			 int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      /* Address-region placement only makes sense for variables with
	 static storage duration.  */
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (0, "address region attributes not allowed with auto storage class");
	  *no_add = true;
	}
      /* Ignore storage attribute of pointed to variable: char __far * x; */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
	{
	  warning (0, "address region attributes on pointed-to types ignored");
	  *no_add = true;
	}
    }

  return NULL_TREE;
}
3919 static int
3920 mep_multiple_address_regions (tree list, bool check_section_attr)
3922 tree a;
3923 int count_sections = 0;
3924 int section_attr_count = 0;
3926 for (a = list; a; a = TREE_CHAIN (a))
3928 if (is_attribute_p ("based", TREE_PURPOSE (a))
3929 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3930 || is_attribute_p ("near", TREE_PURPOSE (a))
3931 || is_attribute_p ("far", TREE_PURPOSE (a))
3932 || is_attribute_p ("io", TREE_PURPOSE (a)))
3933 count_sections ++;
3934 if (check_section_attr)
3935 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3938 if (check_section_attr)
3939 return section_attr_count;
3940 else
3941 return count_sections;
/* Attribute list for DECL: a type's own attributes, else the decl's
   attributes, else the attributes of the decl's type.  The whole
   expansion is parenthesized so the conditional cannot bind to
   surrounding operators at a use site (macro hygiene fix; the old
   expansion was an unparenthesized ?: chain).  */
#define MEP_ATTRIBUTES(decl) \
  (TYPE_P (decl) ? TYPE_ATTRIBUTES (decl) \
		 : DECL_ATTRIBUTES (decl) \
		   ? (DECL_ATTRIBUTES (decl)) \
		   : TYPE_ATTRIBUTES (TREE_TYPE (decl)))
/* Attribute handler for "near" and "far".  Like mep_validate_based_tiny
   but also legal on functions/method types, and additionally drops all
   attributes when a conflicting address-region attribute is already
   present on the declaration.  */
static tree
mep_validate_near_far (tree *node, tree name, tree args,
		       int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables and functions",
	       name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      /* Placement attributes need static storage duration.  */
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (0, "address region attributes not allowed with auto storage class");
	  *no_add = true;
	}
      /* Ignore storage attribute of pointed to variable: char __far * x; */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
	{
	  warning (0, "address region attributes on pointed-to types ignored");
	  *no_add = true;
	}
    }
  else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
    {
      /* Conflicting region request: warn and wipe the old attributes.  */
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
      DECL_ATTRIBUTES (*node) = NULL_TREE;
    }
  return NULL_TREE;
}
3987 static tree
3988 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3989 int flags ATTRIBUTE_UNUSED, bool *no_add)
3991 if (TREE_CODE (*node) != FUNCTION_DECL
3992 && TREE_CODE (*node) != METHOD_TYPE)
3994 warning (0, "%qE attribute only applies to functions", name);
3995 *no_add = true;
3997 return NULL_TREE;
/* Attribute handler for "interrupt".  Only function decls qualify;
   interrupt handlers must not be inlined, must return void, and must
   take no arguments.  */
static tree
mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
			int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  tree function_type;

  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (0, "%qE attribute only applies to functions", name);
      *no_add = true;
      return NULL_TREE;
    }

  /* Handlers have special prologue/epilogue; inlining them into a
     normal caller would be wrong.  */
  if (DECL_DECLARED_INLINE_P (*node))
    error ("cannot inline interrupt function %qE", DECL_NAME (*node));
  DECL_UNINLINABLE (*node) = 1;

  function_type = TREE_TYPE (*node);

  if (TREE_TYPE (function_type) != void_type_node)
    error ("interrupt function must have return type of void");

  /* Accept an empty prototype list or exactly (void).  */
  if (TYPE_ARG_TYPES (function_type)
      && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
	  || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
    error ("interrupt function must have no arguments");

  return NULL_TREE;
}
/* Attribute handler for "io" and "cb".  Valid only on variables; the
   optional argument must be an integer constant (the fixed address).
   Unless -mio-volatile is disabled, accepted variables are made
   volatile.  */
static tree
mep_validate_io_cb (tree *node, tree name, tree args,
		    int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }

  if (args != NULL_TREE)
    {
      /* Strip a NON_LVALUE_EXPR wrapper so a constant expression is
	 seen as the INTEGER_CST underneath.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
	TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
	{
	  warning (0, "%qE attribute allows only an integer constant argument",
		   name);
	  *no_add = true;
	}
    }

  if (*no_add == false && !TARGET_IO_NO_VOLATILE)
    TREE_THIS_VOLATILE (*node) = 1;

  return NULL_TREE;
}
/* Attribute handler for "vliw".  Valid on function types/decls, method
   types, field decls and type decls.  For other nodes, warn (with a
   friendlier message for pointers and arrays, plus a one-time syntax
   hint for each).  Also errors if the target has no VLIW support.  */
static tree
mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
		   int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      static int gave_pointer_note = 0;
      static int gave_array_note = 0;
      static const char * given_type = NULL;

      /* NOTE(review): given_type is assigned unconditionally from
	 tree_code_name, so the "else" branch of the warning below is
	 effectively unreachable.  */
      given_type = tree_code_name[TREE_CODE (*node)];
      if (TREE_CODE (*node) == POINTER_TYPE)
	given_type = "pointers";
      if (TREE_CODE (*node) == ARRAY_TYPE)
	given_type = "arrays";

      if (given_type)
	warning (0, "%qE attribute only applies to functions, not %s",
		 name, given_type);
      else
	warning (0, "%qE attribute only applies to functions",
		 name);
      *no_add = true;

      /* Emit each usage hint at most once per compilation.  */
      if (TREE_CODE (*node) == POINTER_TYPE
	  && !gave_pointer_note)
	{
	  inform (input_location, "To describe a pointer to a VLIW function, use syntax like this:");
	  inform (input_location, "  typedef int (__vliw *vfuncptr) ();");
	  gave_pointer_note = 1;
	}

      if (TREE_CODE (*node) == ARRAY_TYPE
	  && !gave_array_note)
	{
	  inform (input_location, "To describe an array of VLIW function pointers, use syntax like this:");
	  inform (input_location, "  typedef int (__vliw *vfuncptr[]) ();");
	  gave_array_note = 1;
	}
    }
  if (!TARGET_VLIW)
    error ("VLIW functions are not allowed without a VLIW configuration");
  return NULL_TREE;
}
4107 static const struct attribute_spec mep_attribute_table[11] =
4109 /* name min max decl type func handler */
4110 { "based", 0, 0, false, false, false, mep_validate_based_tiny },
4111 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny },
4112 { "near", 0, 0, false, false, false, mep_validate_near_far },
4113 { "far", 0, 0, false, false, false, mep_validate_near_far },
4114 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt },
4115 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt },
4116 { "io", 0, 1, false, false, false, mep_validate_io_cb },
4117 { "cb", 0, 1, false, false, false, mep_validate_io_cb },
4118 { "vliw", 0, 0, false, true, false, mep_validate_vliw },
4119 { NULL, 0, 0, false, false, false, NULL }
4122 static bool
4123 mep_function_attribute_inlinable_p (const_tree callee)
4125 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4126 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4127 return (lookup_attribute ("disinterrupt", attrs) == 0
4128 && lookup_attribute ("interrupt", attrs) == 0);
4131 static bool
4132 mep_can_inline_p (tree caller, tree callee)
4134 if (TREE_CODE (callee) == ADDR_EXPR)
4135 callee = TREE_OPERAND (callee, 0);
4137 if (!mep_vliw_function_p (caller)
4138 && mep_vliw_function_p (callee))
4140 return false;
4142 return true;
/* Flag bits recorded per function name from #pragma call and
   #pragma disinterrupt respectively.  */
#define FUNC_CALL 1
#define FUNC_DISINTERRUPT 2


/* One record per function name seen in a pragma: FLAG says which
   pragmas named it, USED which of those were actually acted upon (so
   unused ones can be diagnosed at end of file).  */
struct GTY(()) pragma_entry {
  int used;
  int flag;
  const char *funcname;
};
typedef struct pragma_entry pragma_entry;

/* Hash table of functions named by #pragma call/disinterrupt, keyed
   by function name.  */
static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4159 static int
4160 pragma_entry_eq (const void *p1, const void *p2)
4162 const pragma_entry *old = (const pragma_entry *) p1;
4163 const char *new_name = (const char *) p2;
4165 return strcmp (old->funcname, new_name) == 0;
4168 static hashval_t
4169 pragma_entry_hash (const void *p)
4171 const pragma_entry *old = (const pragma_entry *) p;
4172 return htab_hash_string (old->funcname);
/* Record that FUNCNAME was named by a pragma of kind FLAG (FUNC_CALL
   or FUNC_DISINTERRUPT), creating the hash table and/or the entry on
   first use.  Entries are GC-allocated since the table lives for the
   whole compilation.  */
static void
mep_note_pragma_flag (const char *funcname, int flag)
{
  pragma_entry **slot;

  if (!pragma_htab)
    pragma_htab = htab_create_ggc (31, pragma_entry_hash,
				   pragma_entry_eq, NULL);

  slot = (pragma_entry **)
    htab_find_slot_with_hash (pragma_htab, funcname,
			      htab_hash_string (funcname), INSERT);

  if (!*slot)
    {
      /* First pragma for this name: allocate a fresh record.  */
      *slot = GGC_NEW (pragma_entry);
      (*slot)->flag = 0;
      (*slot)->used = 0;
      (*slot)->funcname = ggc_strdup (funcname);
    }
  (*slot)->flag |= flag;
}
4198 static bool
4199 mep_lookup_pragma_flag (const char *funcname, int flag)
4201 pragma_entry **slot;
4203 if (!pragma_htab)
4204 return false;
4206 if (funcname[0] == '@' && funcname[2] == '.')
4207 funcname += 3;
4209 slot = (pragma_entry **)
4210 htab_find_slot_with_hash (pragma_htab, funcname,
4211 htab_hash_string (funcname), NO_INSERT);
4212 if (slot && *slot && ((*slot)->flag & flag))
4214 (*slot)->used |= flag;
4215 return true;
4217 return false;
/* Return true if FUNCNAME was named in a "#pragma call".  */
bool
mep_lookup_pragma_call (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_CALL);
}
/* Record a "#pragma call" naming FUNCNAME.  */
void
mep_note_pragma_call (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_CALL);
}
/* Return true if FUNCNAME was named in a "#pragma disinterrupt".  */
bool
mep_lookup_pragma_disinterrupt (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
}
/* Record a "#pragma disinterrupt" naming FUNCNAME.  */
void
mep_note_pragma_disinterrupt (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
}
4244 static int
4245 note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
4247 const pragma_entry *d = (const pragma_entry *)(*slot);
4249 if ((d->flag & FUNC_DISINTERRUPT)
4250 && !(d->used & FUNC_DISINTERRUPT))
4251 warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
4252 return 1;
/* End-of-file hook: diagnose any #pragma disinterrupt that never
   matched a function.  */
void
mep_file_cleanups (void)
{
  if (pragma_htab)
    htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
}
/* Map DECL's attribute LIST to a one-character section encoding:
   'b' based, 't' tiny, 'n' near, 'f' far, 'i' io (with an in-range
   constant address), 'I' io (no/out-of-range address), 'c' cb, or 0
   for none.  Duplicate region attributes are warned about and all but
   the first dropped.  */
static int
mep_attrlist_to_encoding (tree list, tree decl)
{
  if (mep_multiple_address_regions (list, false) > 1)
    {
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       TREE_PURPOSE (TREE_CHAIN (list)),
	       DECL_NAME (decl),
	       DECL_SOURCE_LINE (decl));
      /* Keep only the first attribute in the chain.  */
      TREE_CHAIN (list) = NULL_TREE;
    }

  while (list)
    {
      if (is_attribute_p ("based", TREE_PURPOSE (list)))
	return 'b';
      if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
	return 't';
      if (is_attribute_p ("near", TREE_PURPOSE (list)))
	return 'n';
      if (is_attribute_p ("far", TREE_PURPOSE (list)))
	return 'f';
      if (is_attribute_p ("io", TREE_PURPOSE (list)))
	{
	  /* 'i' only when a constant address in [0, 0x1000000] was
	     given; otherwise 'I'.  */
	  if (TREE_VALUE (list)
	      && TREE_VALUE (TREE_VALUE (list))
	      && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
	      if (location >= 0
		  && location <= 0x1000000)
		return 'i';
	    }
	  return 'I';
	}
      if (is_attribute_p ("cb", TREE_PURPOSE (list)))
	return 'c';
      list = TREE_CHAIN (list);
    }
  /* -mtf: unattributed, unsectioned functions default to far text.  */
  if (TARGET_TF
      && TREE_CODE (decl) == FUNCTION_DECL
      && DECL_SECTION_NAME (decl) == 0)
    return 'f';
  return 0;
}
4309 static int
4310 mep_comp_type_attributes (const_tree t1, const_tree t2)
4312 int vliw1, vliw2;
4314 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4315 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4317 if (vliw1 != vliw2)
4318 return 0;
4320 return 1;
/* Implement TARGET_INSERT_ATTRIBUTES.  Attach implicit attributes to
   DECL: a "disinterrupt" attribute for functions named by the pragma,
   and a default address-region attribute (based/tiny/far) for static
   variables chosen by size cutoffs, -mrand-tpgp hashing, or
   -mconst-section, unless the user already chose a region.  Also
   warns when two __io variables share the same fixed address.  */
static void
mep_insert_attributes (tree decl, tree *attributes)
{
  int size;
  const char *secname = 0;
  tree attrib, attrlist;
  char encoding;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));

      /* Honor "#pragma disinterrupt funcname".  */
      if (mep_lookup_pragma_disinterrupt (funcname))
	{
	  attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
	  *attributes = chainon (*attributes, attrib);
	}
    }

  /* Only variables with static storage duration get placed.  */
  if (TREE_CODE (decl) != VAR_DECL
      || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
    return;

  if (TREE_READONLY (decl) && TARGET_DC)
    /* -mdc means that const variables default to the near section,
       regardless of the size cutoff.  */
    return;

  /* User specified an attribute, so override the default.
     Ignore storage attribute of pointed to variable. char __far * x;  */
  if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
    {
      if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
	TYPE_ATTRIBUTES (decl) = NULL_TREE;
      else if (DECL_ATTRIBUTES (decl) && *attributes)
	DECL_ATTRIBUTES (decl) = NULL_TREE;
    }

  /* Look for an explicit region choice: incoming attributes first,
     then the decl's, then the type's.  */
  attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
  encoding = mep_attrlist_to_encoding (attrlist, decl);
  if (!encoding && TYPE_P (TREE_TYPE (decl)))
    {
      attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
      encoding = mep_attrlist_to_encoding (attrlist, decl);
    }
  if (encoding)
    {
      /* This means that the declaration has a specific section
	 attribute, so we should not apply the default rules.  */

      if (encoding == 'i' || encoding == 'I')
	{
	  tree attr = lookup_attribute ("io", attrlist);
	  if (attr
	      && TREE_VALUE (attr)
	      && TREE_VALUE (TREE_VALUE(attr)))
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
	      static tree previous_value = 0;
	      static int previous_location = 0;
	      static tree previous_name = 0;

	      /* We take advantage of the fact that gcc will reuse the
		 same tree pointer when applying an attribute to a
		 list of decls, but produce a new tree for attributes
		 on separate source lines, even when they're textually
		 identical.  This is the behavior we want.  */
	      if (TREE_VALUE (attr) == previous_value
		  && location == previous_location)
		{
		  warning(0, "__io address 0x%x is the same for %qE and %qE",
			  location, previous_name, DECL_NAME (decl));
		}
	      previous_name = DECL_NAME (decl);
	      previous_location = location;
	      previous_value = TREE_VALUE (attr);
	    }
	}
      return;
    }

  /* Declarations of arrays can change size.  Don't trust them.  */
  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
    size = 0;
  else
    size = int_size_in_bytes (TREE_TYPE (decl));

  if (TARGET_RAND_TPGP && size <= 4 && size > 0)
    {
      /* -mrand-tpgp: scatter small variables pseudo-randomly across
	 the based/tiny/far sections by hashing the name.  */
      if (TREE_PUBLIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_STATIC (decl))
	{
	  const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
	  int key = 0;

	  while (*name)
	    key += *name++;

	  switch (key & 3)
	    {
	    case 0:
	      secname = "based";
	      break;
	    case 1:
	      secname = "tiny";
	      break;
	    case 2:
	      secname = "far";
	      break;
	    default:
	      ;
	    }
	}
    }
  else
    {
      /* Normal size-cutoff placement.  */
      if (size <= mep_based_cutoff && size > 0)
	secname = "based";
      else if (size <= mep_tiny_cutoff && size > 0)
	secname = "tiny";
      else if (TARGET_L)
	secname = "far";
    }

  /* -mconst-section= may override the choice for read-only data.  */
  if (mep_const_section && TREE_READONLY (decl))
    {
      if (strcmp (mep_const_section, "tiny") == 0)
	secname = "tiny";
      else if (strcmp (mep_const_section, "near") == 0)
	return;
      else if (strcmp (mep_const_section, "far") == 0)
	secname = "far";
    }

  if (!secname)
    return;

  if (!mep_multiple_address_regions (*attributes, true)
      && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
    {
      attrib = build_tree_list (get_identifier (secname), NULL_TREE);

      /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
	 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
	 and mep_validate_based_tiny.  */
      DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
    }
}
/* Implement TARGET_ENCODE_SECTION_INFO.  On the first call for DECL,
   prefix its assembler symbol with "@<encoding>." (see
   mep_attrlist_to_encoding) so later passes can recover the region
   from the name alone, warn when a variable exceeds its region's
   capacity, and force functions into .vtext/.ftext variants via
   DECL_SECTION_NAME.  */
static void
mep_encode_section_info (tree decl, rtx rtl, int first)
{
  rtx rtlname;
  const char *oldname;
  const char *secname;
  char encoding;
  char *newname;
  tree idp;
  int maxsize;
  tree type;
  tree mep_attributes;

  if (! first)
    return;

  if (TREE_CODE (decl) != VAR_DECL
      && TREE_CODE (decl) != FUNCTION_DECL)
    return;

  /* Fetch the current symbol name, directly or through a MEM.  */
  rtlname = XEXP (rtl, 0);
  if (GET_CODE (rtlname) == SYMBOL_REF)
    oldname = XSTR (rtlname, 0);
  else if (GET_CODE (rtlname) == MEM
	   && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
    oldname = XSTR (XEXP (rtlname, 0), 0);
  else
    gcc_unreachable ();

  type = TREE_TYPE (decl);
  if (type == error_mark_node)
    return;
  mep_attributes = MEP_ATTRIBUTES (decl);

  encoding = mep_attrlist_to_encoding (mep_attributes, decl);

  if (encoding)
    {
      /* Rewrite the symbol as "@<enc>.<oldname>" (3 prefix chars plus
	 the terminating NUL accounts for the +4).  */
      newname = (char *) alloca (strlen (oldname) + 4);
      sprintf (newname, "@%c.%s", encoding, oldname);
      idp = get_identifier (newname);
      XEXP (rtl, 0) =
	gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));

      /* Capacity check for the size-limited regions.  */
      switch (encoding)
	{
	case 'b':
	  maxsize = 128;
	  secname = "based";
	  break;
	case 't':
	  maxsize = 65536;
	  secname = "tiny";
	  break;
	case 'n':
	  maxsize = 0x1000000;
	  secname = "near";
	  break;
	default:
	  maxsize = 0;
	  secname = 0;
	  break;
	}
      if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
	{
	  warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
		   oldname,
		   (long) int_size_in_bytes (TREE_TYPE (decl)),
		   secname,
		   maxsize);
	}
    }

  /* Functions do not go through select_section, so we force it here
     by using the DECL_SECTION_NAME as if the user specified the
     .vtext or .ftext sections.  */
  if (! DECL_SECTION_NAME (decl)
      && TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* NOTE(review): this inner tree 'secname' shadows the outer
	 'const char *secname' above; the two are unrelated.  */
      tree secname;

      if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	{
	  if (encoding == 'f')
	    DECL_SECTION_NAME (decl) = build_string (7, ".vftext");
	  else
	    DECL_SECTION_NAME (decl) = build_string (6, ".vtext");
	}
      else if (encoding == 'f')
	{
	  if (flag_function_sections || DECL_ONE_ONLY (decl))
	    mep_unique_section (decl, 0);
	  else
	    DECL_SECTION_NAME (decl) = build_string (6, ".ftext");
	}

      /* This is so we can control inlining.  It does not matter what
	 attribute we add, just that it has one.  */
      secname = build_tree_list (get_identifier ("section"), DECL_SECTION_NAME (decl));
      if (TYPE_P (decl))
	TYPE_ATTRIBUTES (decl) = chainon (TYPE_ATTRIBUTES (decl), secname);
      else
	DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), secname);
    }
}
/* Implement TARGET_STRIP_NAME_ENCODING.  Skip any leading '*'
   markers and "@x." section-encoding prefixes and return a pointer to
   the user-visible part of SYM.  */
const char *
mep_strip_name_encoding (const char *sym)
{
  for (;;)
    {
      if (sym[0] == '*')
	{
	  sym++;
	  continue;
	}
      if (sym[0] == '@' && sym[2] == '.')
	{
	  sym += 3;
	  continue;
	}
      return sym;
    }
}
/* Implement TARGET_ASM_SELECT_SECTION.  Pick an output section for
   DECL based on its "@x." name encoding and whether it is read-only
   (constant initializer, no side effects); io/cb variables must be
   uninitialized and fall back to .data with an error.  */
static section *
mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
		    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  int readonly = 1;

  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (!TREE_READONLY (decl)
	  || TREE_SIDE_EFFECTS (decl)
	  || !DECL_INITIAL (decl)
	  || (DECL_INITIAL (decl) != error_mark_node
	      && !TREE_CONSTANT (DECL_INITIAL (decl))))
	readonly = 0;
      break;
    case CONSTRUCTOR:
      if (! TREE_CONSTANT (decl))
	readonly = 0;
      break;

    default:
      break;
    }

  if (TREE_CODE (decl) == VAR_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	switch (name[1])
	  {
	  case 'b':
	    return based_section;

	  case 't':
	    /* tiny: read-only -> .srodata, initialized -> .sdata,
	       otherwise the tiny BSS.  */
	    if (readonly)
	      return srodata_section;
	    if (DECL_INITIAL (decl))
	      return sdata_section;
	    return tinybss_section;

	  case 'f':
	    if (readonly)
	      return frodata_section;
	    return far_section;

	  case 'i':
	  case 'I':
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<io%> must be uninitialized", decl);
	    return data_section;

	  case 'c':
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<cb%> must be uninitialized", decl);
	    return data_section;
	  }
    }

  if (readonly)
    return readonly_data_section;

  return data_section;
}
/* Implement TARGET_ASM_UNIQUE_SECTION.  Build a per-decl section name
   "<prefix><symbol>", choosing the prefix from the decl kind,
   read-only-ness and "@x." name encoding; column 1 of the table holds
   the .gnu.linkonce variants used for DECL_ONE_ONLY decls.  */
static void
mep_unique_section (tree decl, int reloc)
{
  static const char *prefixes[][2] =
  {
    { ".text.", ".gnu.linkonce.t." },
    { ".rodata.", ".gnu.linkonce.r." },
    { ".data.", ".gnu.linkonce.d." },
    { ".based.", ".gnu.linkonce.based." },
    { ".sdata.", ".gnu.linkonce.s." },
    { ".far.", ".gnu.linkonce.far." },
    { ".ftext.", ".gnu.linkonce.ft." },
    { ".frodata.", ".gnu.linkonce.frd." },
    { ".srodata.", ".gnu.linkonce.srd." }
  };
  int sec = 2; /* .data */
  int len;
  const char *name, *prefix;
  char *string;

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  /* Prefer the (possibly "@x."-encoded) RTL symbol name.  */
  if (DECL_RTL (decl))
    name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

  if (TREE_CODE (decl) == FUNCTION_DECL)
    sec = 0; /* .text */
  else if (decl_readonly_section (decl, reloc))
    sec = 1; /* .rodata */

  /* Refine the choice from the section encoding, then strip it.  */
  if (name[0] == '@' && name[2] == '.')
    {
      switch (name[1])
	{
	case 'b':
	  sec = 3; /* .based */
	  break;
	case 't':
	  if (sec == 1)
	    sec = 8; /* .srodata */
	  else
	    sec = 4; /* .sdata */
	  break;
	case 'f':
	  if (sec == 0)
	    sec = 6; /* .ftext */
	  else if (sec == 1)
	    sec = 7; /* .frodata */
	  else
	    sec = 5; /* .far. */
	  break;
	}
      name += 3;
    }

  prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
  len = strlen (name) + strlen (prefix);
  string = (char *) alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  DECL_SECTION_NAME (decl) = build_string (len, string);
}
/* Given a decl, a section name, and whether the decl initializer
   has relocs, choose attributes for the section.  */

/* Machine-dependent section flag marking VLIW-function sections, so
   mep_asm_named_section can emit the matching .vliw directive.  */
#define SECTION_MEP_VLIW SECTION_MACH_DEP

/* Implement TARGET_SECTION_TYPE_FLAGS: the default flags, plus
   SECTION_MEP_VLIW for sections holding "vliw" functions.  */
static unsigned int
mep_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  if (decl && TREE_CODE (decl) == FUNCTION_DECL
      && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
    flags |= SECTION_MEP_VLIW;

  return flags;
}
/* Switch to an arbitrary section NAME with attributes as specified
   by FLAGS.  ALIGN specifies any known alignment requirements for
   the section; 0 if the default should be used.

   Differs from the standard ELF version only in support of VLIW mode.  */

static void
mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
{
  /* At most 5 flag characters plus the NUL fit in flagchars[8].  */
  char flagchars[8], *f = flagchars;
  const char *type;

  if (!(flags & SECTION_DEBUG))
    *f++ = 'a';
  if (flags & SECTION_WRITE)
    *f++ = 'w';
  if (flags & SECTION_CODE)
    *f++ = 'x';
  if (flags & SECTION_SMALL)
    *f++ = 's';
  if (flags & SECTION_MEP_VLIW)
    *f++ = 'v';
  *f = '\0';

  if (flags & SECTION_BSS)
    type = "nobits";
  else
    type = "progbits";

  fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
	   name, flagchars, type);

  /* Tell the assembler which instruction set the section holds.  */
  if (flags & SECTION_CODE)
    fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
	   asm_out_file);
}
/* Output an aligned common (or local-common) definition for NAME.
   io/cb variables become absolute symbols at their fixed address;
   based/tiny/far variables are emitted as zero-filled objects in
   their own BSS-like sections; everything else uses .comm/.local.  */
void
mep_output_aligned_common (FILE *stream, tree decl, const char *name,
			   int size, int align, int global)
{
  /* We intentionally don't use mep_section_tag() here.  */
  if (name[0] == '@'
      && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
      && name[2] == '.')
    {
      /* io/cb: define the symbol as its fixed address, no storage.  */
      int location = -1;
      tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
				    DECL_ATTRIBUTES (decl));
      if (attr
	  && TREE_VALUE (attr)
	  && TREE_VALUE (TREE_VALUE(attr)))
	location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
      if (location == -1)
	return;
      if (global)
	{
	  fprintf (stream, "\t.globl\t");
	  assemble_name (stream, name);
	  fprintf (stream, "\n");
	}
      assemble_name (stream, name);
      fprintf (stream, " = %d\n", location);
      return;
    }
  if (name[0] == '@' && name[2] == '.')
    {
      const char *sec = 0;
      switch (name[1])
	{
	case 'b':
	  switch_to_section (based_section);
	  sec = ".based";
	  break;
	case 't':
	  switch_to_section (tinybss_section);
	  sec = ".sbss";
	  break;
	case 'f':
	  switch_to_section (farbss_section);
	  sec = ".farbss";
	  break;
	}
      if (sec)
	{
	  const char *name2;
	  int p2align = 0;

	  /* Convert ALIGN (bits) to a power-of-two byte alignment.  */
	  while (align > BITS_PER_UNIT)
	    {
	      align /= 2;
	      p2align ++;
	    }
	  name2 = TARGET_STRIP_NAME_ENCODING (name);
	  if (global)
	    fprintf (stream, "\t.globl\t%s\n", name2);
	  fprintf (stream, "\t.p2align %d\n", p2align);
	  fprintf (stream, "\t.type\t%s,@object\n", name2);
	  fprintf (stream, "\t.size\t%s,%d\n", name2, size);
	  fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
	  return;
	}
    }

  /* Default: plain common symbol (preceded by .local if static).  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
4855 /* Trampolines. */
4857 void
4858 mep_init_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
4860 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4861 LCT_NORMAL, VOIDmode, 3,
4862 addr, Pmode,
4863 fnaddr, Pmode,
4864 static_chain, Pmode);
4867 /* Experimental Reorg. */
/* Return true if REG is mentioned anywhere inside IN; with REG == NULL,
   return true if IN mentions any MEM or LABEL_REF instead.  When
   MODES_TOO is nonzero, a register reference only counts if its mode
   matches REG's mode as well as its number.  Recursive rtx walk.  */
static bool
mep_mentioned_p (rtx in,
		 rtx reg, /* NULL for mem */
		 int modes_too) /* if nonzero, modes must match also. */
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return false;
  if (reg && GET_CODE (reg) != REG)
    return false;

  if (GET_CODE (in) == LABEL_REF)
    return (reg == 0);

  code = GET_CODE (in);

  switch (code)
    {
    case MEM:
      /* When searching for a register, look inside the address;
	 when searching for memory references, any MEM matches.  */
      if (reg)
	return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
      return true;

    case REG:
      if (!reg)
	return false;
      if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
	return false;
      return (REGNO (in) == REGNO (reg));

    case SCRATCH:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
      /* These rtxes can never contain the thing we're looking for.  */
      return false;

    default:
      break;
    }

  /* Set's source should be read-only.  */
  if (code == SET && !reg)
    return mep_mentioned_p (SET_DEST (in), reg, modes_too);

  /* Generic walk over the operands of any other rtx code.  */
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	    if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
	      return true;
	}
      else if (fmt[i] == 'e'
	       && mep_mentioned_p (XEXP (in, i), reg, modes_too))
	return true;
    }
  return false;
}
4935 #define EXPERIMENTAL_REGMOVE_REORG 1
4937 #if EXPERIMENTAL_REGMOVE_REORG
/* Return 1 if hard registers R1 and R2 belong to the same broad class
   (both core general registers, or both coprocessor registers), so
   that a move between them can be deleted by the regmove reorg.  */
static int
mep_compatible_reg_class (int r1, int r2)
{
  if (GR_REGNO_P (r1))
    return GR_REGNO_P (r2) ? 1 : 0;
  if (CR_REGNO_P (r1))
    return CR_REGNO_P (r2) ? 1 : 0;
  return 0;
}
/* Delete superfluous register-to-register moves.  For each
   (set r2 r1) where r1 dies, find the next insn that uses r2 (with r2
   dying there), substitute r1 directly into that insn, and if the
   result still matches an insn pattern delete the move.  Iterates to
   a fixed point.  INSNS is the head of the insn chain.  */
static void
mep_reorg_regmove (rtx insns)
{
  rtx insn, next, pat, follow, *where;
  int count = 0, done = 0, replace, before = 0;

  /* BEFORE is only used for the dump-file statistics.  */
  if (dump_file)
    for (insn = insns; insn; insn = NEXT_INSN (insn))
      if (GET_CODE (insn) == INSN)
	before++;

  /* We're looking for (set r2 r1) moves where r1 dies, followed by a
     set that uses the r2 and r2 dies there.  We replace r2 with r1
     and see if it's still a valid insn.  If so, delete the first set.
     Copied from reorg.c.  */

  while (!done)
    {
      done = 1;
      for (insn = insns; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) != INSN)
	    continue;
	  pat = PATTERN (insn);

	  replace = 0;

	  if (GET_CODE (pat) == SET
	      && GET_CODE (SET_SRC (pat)) == REG
	      && GET_CODE (SET_DEST (pat)) == REG
	      && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
	      && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
	    {
	      follow = next_nonnote_insn (insn);
	      if (dump_file)
		fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));

	      /* Skip forward over simple sets that touch neither the
		 source nor the destination of the move.  */
	      while (follow && GET_CODE (follow) == INSN
		     && GET_CODE (PATTERN (follow)) == SET
		     && !dead_or_set_p (follow, SET_SRC (pat))
		     && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
		     && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
		{
		  if (dump_file)
		    fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
		  follow = next_nonnote_insn (follow);
		}

	      if (dump_file)
		fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
	      if (follow && GET_CODE (follow) == INSN
		  && GET_CODE (PATTERN (follow)) == SET
		  && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
		{
		  /* WHERE selects how much of FOLLOW we try to
		     substitute into: just the source for a register
		     destination, the whole pattern for a memory
		     destination (the address may use r2 too).  */
		  if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
		    {
		      if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
			{
			  replace = 1;
			  where = & SET_SRC (PATTERN (follow));
			}
		    }
		  else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
		    {
		      if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
			{
			  replace = 1;
			  where = & PATTERN (follow);
			}
		    }
		}
	    }

	  /* If so, follow is the corresponding insn */
	  if (replace)
	    {
	      if (dump_file)
		{
		  rtx x;

		  fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
		  for (x = insn; x ;x = NEXT_INSN (x))
		    {
		      print_rtl_single (dump_file, x);
		      if (x == follow)
			break;
		      fprintf (dump_file, "\n");
		    }
		}

	      /* Only commit if the substituted FOLLOW still matches
		 an insn pattern.  */
	      if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
					       follow, where))
		{
		  count ++;
		  next = delete_insn (insn);
		  if (dump_file)
		    {
		      fprintf (dump_file, "\n----- Success! new insn:\n\n");
		      print_rtl_single (dump_file, follow);
		    }
		  /* A deletion may expose further opportunities;
		     re-scan the whole chain.  */
		  done = 0;
		}
	    }
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
      fprintf (dump_file, "=====\n");
    }
}
5062 #endif
5065 /* Figure out where to put LABEL, which is the label for a repeat loop.
5066 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5067 the loop ends just before LAST_INSN. If SHARED, insns other than the
5068 "repeat" might use LABEL to jump to the loop's continuation point.
5070 Return the last instruction in the adjusted loop. */
static rtx
mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
			      bool shared)
{
  rtx next, prev;
  int count = 0, code, icode;

  if (dump_file)
    fprintf (dump_file, "considering end of repeat loop at insn %d\n",
	     INSN_UID (last_insn));

  /* Set PREV to the last insn in the loop.  */
  prev = last_insn;
  if (!including)
    prev = PREV_INSN (prev);

  /* Set NEXT to the next insn after the repeat label.  The hardware
     repeat executes the final two insn slots after the label, so we
     walk backwards collecting up to two insns that are safe to place
     there.  If SHARED, other jumps target LABEL and we cannot move it
     back at all.  */
  next = last_insn;
  if (!shared)
    while (prev != 0)
      {
	code = GET_CODE (prev);
	/* Never move the label past a call, label or barrier.  */
	if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
	  break;

	if (INSN_P (prev))
	  {
	    if (GET_CODE (PATTERN (prev)) == SEQUENCE)
	      prev = XVECEXP (PATTERN (prev), 0, 1);

	    /* Other insns that should not be in the last two opcodes.  */
	    icode = recog_memoized (prev);
	    if (icode < 0
		|| icode == CODE_FOR_repeat
		|| icode == CODE_FOR_erepeat
		|| get_attr_may_trap (prev) == MAY_TRAP_YES)
	      break;

	    /* That leaves JUMP_INSN and INSN.  It will have BImode if it
	       is the second instruction in a VLIW bundle.  In that case,
	       loop again: if the first instruction also satisfies the
	       conditions above then we will reach here again and put
	       both of them into the repeat epilogue.  Otherwise both
	       should remain outside.  */
	    if (GET_MODE (prev) != BImode)
	      {
		count++;
		next = prev;
		if (dump_file)
		  print_rtl_single (dump_file, next);
		if (count == 2)
		  break;
	      }
	  }
	prev = PREV_INSN (prev);
      }

  /* See if we're adding the label immediately after the repeat insn.
     If so, we need to separate them with a nop.  */
  prev = prev_real_insn (next);
  if (prev)
    switch (recog_memoized (prev))
      {
      case CODE_FOR_repeat:
      case CODE_FOR_erepeat:
	if (dump_file)
	  fprintf (dump_file, "Adding nop inside loop\n");
	emit_insn_before (gen_nop (), next);
	break;

      default:
	break;
      }

  /* Insert the label.  */
  emit_label_before (label, next);

  /* Insert the nops.  If we collected fewer than two trailing insns,
     pad the repeat epilogue with nops so it always has two slots.  */
  if (dump_file && count < 2)
    fprintf (dump_file, "Adding %d nop%s\n\n",
	     2 - count, count == 1 ? "" : "s");

  for (; count < 2; count++)
    if (including)
      last_insn = emit_insn_after (gen_nop (), last_insn);
    else
      emit_insn_before (gen_nop (), last_insn);

  return last_insn;
}
5164 void
5165 mep_emit_doloop (rtx *operands, int is_end)
5167 rtx tag;
5169 if (cfun->machine->doloop_tags == 0
5170 || cfun->machine->doloop_tag_from_end == is_end)
5172 cfun->machine->doloop_tags++;
5173 cfun->machine->doloop_tag_from_end = is_end;
5176 tag = GEN_INT (cfun->machine->doloop_tags - 1);
5177 if (is_end)
5178 emit_jump_insn (gen_doloop_end_internal (operands[0], operands[4], tag));
5179 else
5180 emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
5184 /* Code for converting doloop_begins and doloop_ends into valid
5185 MeP instructions. A doloop_begin is just a placeholder:
5187 $count = unspec ($count)
5189 where $count is initially the number of iterations - 1.
5190 doloop_end has the form:
5192 if ($count-- == 0) goto label
5194 The counter variable is private to the doloop insns, nothing else
5195 relies on its value.
5197 There are three cases, in decreasing order of preference:
5199 1. A loop has exactly one doloop_begin and one doloop_end.
5200 The doloop_end branches to the first instruction after
5201 the doloop_begin.
5203 In this case we can replace the doloop_begin with a repeat
5204 instruction and remove the doloop_end. I.e.:
5206 $count1 = unspec ($count1)
5207 label:
5209 insn1
5210 insn2
5211 if ($count2-- == 0) goto label
5213 becomes:
5215 repeat $count1,repeat_label
5216 label:
5218 repeat_label:
5219 insn1
5220 insn2
5221 # end repeat
5223 2. As for (1), except there are several doloop_ends. One of them
5224 (call it X) falls through to a label L. All the others fall
5225 through to branches to L.
5227 In this case, we remove X and replace the other doloop_ends
5228 with branches to the repeat label. For example:
5230 $count1 = unspec ($count1)
5231 start:
5233 if ($count2-- == 0) goto label
5234 end:
5236 if ($count3-- == 0) goto label
5237 goto end
5239 becomes:
5241 repeat $count1,repeat_label
5242 start:
5244 repeat_label:
5247 # end repeat
5248 end:
5250 goto repeat_label
5252 3. The fallback case. Replace doloop_begins with:
5254 $count = $count + 1
5256 Replace doloop_ends with the equivalent of:
5258 $count = $count - 1
5259 if ($count == 0) goto label
5261 Note that this might need a scratch register if $count
5262 is stored in memory. */
/* A structure describing one doloop_begin.  These are chained per
   loop tag and allocated on the stack by mep_reorg_repeat.  */
struct mep_doloop_begin {
  /* The next doloop_begin with the same tag.  */
  struct mep_doloop_begin *next;

  /* The instruction itself.  */
  rtx insn;

  /* The initial counter value.  This is known to be a general register.  */
  rtx counter;
};
/* A structure describing a doloop_end.  These are chained per loop
   tag and allocated on the stack by mep_reorg_repeat.  */
struct mep_doloop_end {
  /* The next doloop_end with the same loop tag.  */
  struct mep_doloop_end *next;

  /* The instruction itself.  */
  rtx insn;

  /* The first instruction after INSN when the branch isn't taken.  */
  rtx fallthrough;

  /* The location of the counter value.  Since doloop_end_internal is a
     jump instruction, it has to allow the counter to be stored anywhere
     (any non-fixed register or memory location).  */
  rtx counter;

  /* The target label (the place where the insn branches when the counter
     isn't zero).  */
  rtx label;

  /* A scratch register.  Only available when COUNTER isn't stored
     in a general register.  */
  rtx scratch;
};
/* One do-while loop: all the doloop_begin/doloop_end placeholders
   that share a single tag allocated by mep_emit_doloop.  */
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
5313 /* Return true if LOOP can be converted into repeat/repeat_end form
5314 (that is, if it matches cases (1) or (2) above). */
static bool
mep_repeat_loop_p (struct mep_doloop *loop)
{
  struct mep_doloop_end *end;
  rtx fallthrough;

  /* There must be exactly one doloop_begin and at least one doloop_end.  */
  if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
    return false;

  /* The first doloop_end (X) must branch back to the insn after
     the doloop_begin.  */
  if (prev_real_insn (loop->end->label) != loop->begin->insn)
    return false;

  /* All the other doloop_ends must branch to the same place as X.
     When the branch isn't taken, they must jump to the instruction
     after X.  */
  fallthrough = loop->end->fallthrough;
  for (end = loop->end->next; end != 0; end = end->next)
    if (end->label != loop->end->label
	|| !simplejump_p (end->fallthrough)
	|| next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
      return false;

  return true;
}
5345 /* The main repeat reorg function. See comment above for details. */
static void
mep_reorg_repeat (rtx insns)
{
  rtx insn;
  struct mep_doloop *loops, *loop;
  struct mep_doloop_begin *begin;
  struct mep_doloop_end *end;

  /* Quick exit if we haven't created any loops.  */
  if (cfun->machine->doloop_tags == 0)
    return;

  /* Create an array of mep_doloop structures.  */
  loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
  memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

  /* Search the function for do-while insns and group them by loop tag.
     Operand 2 of both internal patterns is the tag (see mep_emit_doloop).  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      switch (recog_memoized (insn))
	{
	case CODE_FOR_doloop_begin_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
	  begin->next = loop->begin;
	  begin->insn = insn;
	  begin->counter = recog_data.operand[0];

	  loop->begin = begin;
	  break;

	case CODE_FOR_doloop_end_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
	  end->insn = insn;
	  end->fallthrough = next_real_insn (insn);
	  end->counter = recog_data.operand[0];
	  end->label = recog_data.operand[1];
	  end->scratch = recog_data.operand[3];

	  /* If this insn falls through to an unconditional jump,
	     give it a lower priority than the others.  */
	  if (loop->end != 0 && simplejump_p (end->fallthrough))
	    {
	      end->next = loop->end->next;
	      loop->end->next = end;
	    }
	  else
	    {
	      end->next = loop->end;
	      loop->end = end;
	    }
	  break;
	}

  /* Convert the insns for each loop in turn.  */
  for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
    if (mep_repeat_loop_p (loop))
      {
	/* Case (1) or (2).  */
	rtx repeat_label, label_ref;

	/* Create a new label for the repeat insn.  */
	repeat_label = gen_label_rtx ();

	/* Replace the doloop_begin with a repeat.  */
	label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
	emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
			  loop->begin->insn);
	delete_insn (loop->begin->insn);

	/* Insert the repeat label before the first doloop_end.
	   Fill the gap with nops if there are other doloop_ends.  */
	mep_insert_repeat_label_last (loop->end->insn, repeat_label,
				      false, loop->end->next != 0);

	/* Emit a repeat_end (to improve the readability of the output).  */
	emit_insn_before (gen_repeat_end (), loop->end->insn);

	/* Delete the first doloop_end.  */
	delete_insn (loop->end->insn);

	/* Replace the others with branches to REPEAT_LABEL.  */
	for (end = loop->end->next; end != 0; end = end->next)
	  {
	    emit_jump_insn_before (gen_jump (repeat_label), end->insn);
	    delete_insn (end->insn);
	    delete_insn (end->fallthrough);
	  }
      }
    else
      {
	/* Case (3).  First replace all the doloop_begins with increment
	   instructions.  */
	for (begin = loop->begin; begin != 0; begin = begin->next)
	  {
	    emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
					     begin->counter, const1_rtx),
			      begin->insn);
	    delete_insn (begin->insn);
	  }

	/* Replace all the doloop_ends with decrement-and-branch sequences.  */
	for (end = loop->end; end != 0; end = end->next)
	  {
	    rtx reg;

	    start_sequence ();

	    /* Load the counter value into a general register.
	       Registers above 15 can't be used directly here —
	       presumably because add3/bne only take $0-$15; confirm
	       against the .md file.  */
	    reg = end->counter;
	    if (!REG_P (reg) || REGNO (reg) > 15)
	      {
		reg = end->scratch;
		emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
	      }

	    /* Decrement the counter.  */
	    emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
				      constm1_rtx));

	    /* Copy it back to its original location.  */
	    if (reg != end->counter)
	      emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

	    /* Jump back to the start label.  */
	    insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
						     end->label));
	    JUMP_LABEL (insn) = end->label;
	    LABEL_NUSES (end->label)++;

	    /* Emit the whole sequence before the doloop_end.  */
	    insn = get_insns ();
	    end_sequence ();
	    emit_insn_before (insn, end->insn);

	    /* Delete the doloop_end.  */
	    delete_insn (end->insn);
	  }
      }
}
/* Return true if INSN is a conditional branch whose condition can be
   inverted (EQ/NE/LT/GE) and whose inverted form is still a
   recognizable insn.  The condition is temporarily flipped in place,
   re-recognized, and then restored, so the insn is unchanged on
   return (INSN_CODE is left at -1 in either case, forcing a later
   re-recognition).  */
static bool
mep_invertable_branch_p (rtx insn)
{
  rtx cond, set;
  enum rtx_code old_code;
  int i;

  set = PATTERN (insn);
  if (GET_CODE (set) != SET)
    return false;
  if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
    return false;
  cond = XEXP (XEXP (set, 1), 0);
  old_code = GET_CODE (cond);
  switch (old_code)
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      /* GT/LE (and others) have no direct MeP branch, so the
	 inversion isn't attempted.  */
      return false;
    }
  /* Clear the cached insn code so recog sees the modified pattern.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  /* Restore the original condition before reporting the result.  */
  PUT_CODE (cond, old_code);
  INSN_CODE (insn) = -1;
  return i >= 0;
}
/* Invert the condition of branch INSN and retarget it at a fresh
   label emitted after AFTER.  The old target label is deleted when
   INSN was its only user.  INSN must satisfy
   mep_invertable_branch_p.  */
static void
mep_invert_branch (rtx insn, rtx after)
{
  rtx cond, set, label;
  int i;

  set = PATTERN (insn);

  gcc_assert (GET_CODE (set) == SET);
  gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

  cond = XEXP (XEXP (set, 1), 0);
  switch (GET_CODE (cond))
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      gcc_unreachable ();
    }
  label = gen_label_rtx ();
  emit_label_after (label, after);
  /* Operands 1 and 2 of the IF_THEN_ELSE are the taken/not-taken
     targets; redirect whichever one is a LABEL_REF to the new label.  */
  for (i=1; i<=2; i++)
    if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
      {
	rtx ref = XEXP (XEXP (set, 1), i);
	if (LABEL_NUSES (XEXP (ref, 0)) == 1)
	  delete_insn (XEXP (ref, 0));
	XEXP (ref, 0) = label;
	LABEL_NUSES (label) ++;
	JUMP_LABEL (insn) = label;
      }
  /* The modified pattern must still be recognizable.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  gcc_assert (i >= 0);
}
5578 static void
5579 mep_reorg_erepeat (rtx insns)
5581 rtx insn, prev, label_before, l, x;
5582 int count;
5584 for (insn = insns; insn; insn = NEXT_INSN (insn))
5585 if (JUMP_P (insn)
5586 && ! JUMP_TABLE_DATA_P (insn)
5587 && mep_invertable_branch_p (insn))
5589 if (dump_file)
5591 fprintf (dump_file, "\n------------------------------\n");
5592 fprintf (dump_file, "erepeat: considering this jump:\n");
5593 print_rtl_single (dump_file, insn);
5595 count = simplejump_p (insn) ? 0 : 1;
5596 label_before = 0;
5597 for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
5599 if (GET_CODE (prev) == CALL_INSN
5600 || BARRIER_P (prev))
5601 break;
5603 if (prev == JUMP_LABEL (insn))
5605 rtx newlast;
5606 if (dump_file)
5607 fprintf (dump_file, "found loop top, %d insns\n", count);
5609 if (LABEL_NUSES (prev) == 1)
5610 /* We're the only user, always safe */ ;
5611 else if (LABEL_NUSES (prev) == 2)
5613 /* See if there's a barrier before this label. If
5614 so, we know nobody inside the loop uses it.
5615 But we must be careful to put the erepeat
5616 *after* the label. */
5617 rtx barrier;
5618 for (barrier = PREV_INSN (prev);
5619 barrier && GET_CODE (barrier) == NOTE;
5620 barrier = PREV_INSN (barrier))
5622 if (barrier && GET_CODE (barrier) != BARRIER)
5623 break;
5625 else
5627 /* We don't know who else, within or without our loop, uses this */
5628 if (dump_file)
5629 fprintf (dump_file, "... but there are multiple users, too risky.\n");
5630 break;
5633 /* Generate a label to be used by the erepat insn. */
5634 l = gen_label_rtx ();
5636 /* Insert the erepeat after INSN's target label. */
5637 x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
5638 LABEL_NUSES (l)++;
5639 emit_insn_after (x, prev);
5641 /* Insert the erepeat label. */
5642 newlast = (mep_insert_repeat_label_last
5643 (insn, l, !simplejump_p (insn), false));
5644 if (simplejump_p (insn))
5646 emit_insn_before (gen_erepeat_end (), insn);
5647 delete_insn (insn);
5649 else
5651 mep_invert_branch (insn, newlast);
5652 emit_insn_after (gen_erepeat_end (), newlast);
5654 break;
5657 if (LABEL_P (prev))
5659 /* A label is OK if there is exactly one user, and we
5660 can find that user before the next label. */
5661 rtx user = 0;
5662 int safe = 0;
5663 if (LABEL_NUSES (prev) == 1)
5665 for (user = PREV_INSN (prev);
5666 user && (INSN_P (user) || GET_CODE (user) == NOTE);
5667 user = PREV_INSN (user))
5668 if (GET_CODE (user) == JUMP_INSN
5669 && JUMP_LABEL (user) == prev)
5671 safe = INSN_UID (user);
5672 break;
5675 if (!safe)
5676 break;
5677 if (dump_file)
5678 fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
5679 safe, INSN_UID (prev));
5682 if (INSN_P (prev))
5684 count ++;
5685 if (count == 2)
5686 label_before = prev;
5690 if (dump_file)
5691 fprintf (dump_file, "\n==============================\n");
5694 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5695 always do this on its own. */
static void
mep_jmp_return_reorg (rtx insns)
{
  rtx insn, label, ret;
  int ret_code;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (simplejump_p (insn))
      {
	/* Find the first real insn the jump jumps to.  */
	label = ret = JUMP_LABEL (insn);
	while (ret
	       && (GET_CODE (ret) == NOTE
		   || GET_CODE (ret) == CODE_LABEL
		   || GET_CODE (PATTERN (ret)) == USE))
	  ret = NEXT_INSN (ret);

	if (ret)
	  {
	    /* Is it a return?  */
	    ret_code = recog_memoized (ret);
	    if (ret_code == CODE_FOR_return_internal
		|| ret_code == CODE_FOR_eh_return_internal)
	      {
		/* It is.  Replace the jump with a return.  The old
		   target label is deleted once it loses its last
		   user.  */
		LABEL_NUSES (label) --;
		if (LABEL_NUSES (label) == 0)
		  delete_insn (label);
		PATTERN (insn) = copy_rtx (PATTERN (ret));
		/* Force re-recognition of the rewritten insn.  */
		INSN_CODE (insn) = -1;
	      }
	  }
      }
}
5733 static void
5734 mep_reorg_addcombine (rtx insns)
5736 rtx i, n;
5738 for (i = insns; i; i = NEXT_INSN (i))
5739 if (INSN_P (i)
5740 && INSN_CODE (i) == CODE_FOR_addsi3
5741 && GET_CODE (SET_DEST (PATTERN (i))) == REG
5742 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
5743 && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
5744 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
5746 n = NEXT_INSN (i);
5747 if (INSN_P (n)
5748 && INSN_CODE (n) == CODE_FOR_addsi3
5749 && GET_CODE (SET_DEST (PATTERN (n))) == REG
5750 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
5751 && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
5752 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
5754 int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
5755 int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
5756 if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
5757 && ic + nc < 32767
5758 && ic + nc > -32768)
5760 XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
5761 NEXT_INSN (i) = NEXT_INSN (n);
5762 if (NEXT_INSN (i))
5763 PREV_INSN (NEXT_INSN (i)) = i;
5769 /* If this insn adjusts the stack, return the adjustment, else return
5770 zero. */
5771 static int
5772 add_sp_insn_p (rtx insn)
5774 rtx pat;
5776 if (! single_set (insn))
5777 return 0;
5778 pat = PATTERN (insn);
5779 if (GET_CODE (SET_DEST (pat)) != REG)
5780 return 0;
5781 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5782 return 0;
5783 if (GET_CODE (SET_SRC (pat)) != PLUS)
5784 return 0;
5785 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5786 return 0;
5787 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5788 return 0;
5789 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5790 return 0;
5791 return INTVAL (XEXP (SET_SRC (pat), 1));
5794 /* Check for trivial functions that set up an unneeded stack
5795 frame. */
/* Check for trivial functions that set up an unneeded stack
   frame.  If the function consists of exactly one $sp -= N ... $sp += N
   pair with no other use of $sp and no calls in between, delete both
   adjustments.  INSNS is the head of the insn chain.  */
static void
mep_reorg_noframe (rtx insns)
{
  rtx start_frame_insn;
  rtx end_frame_insn = 0;
  int sp_adjust, sp2;
  rtx sp;

  /* The first insn should be $sp = $sp + N */
  while (insns && ! INSN_P (insns))
    insns = NEXT_INSN (insns);
  if (!insns)
    return;

  sp_adjust = add_sp_insn_p (insns);
  if (sp_adjust == 0)
    return;

  start_frame_insn = insns;
  sp = SET_DEST (PATTERN (start_frame_insn));

  insns = next_real_insn (insns);

  while (insns)
    {
      rtx next = next_real_insn (insns);
      /* Stop before the final insn (normally the return).  */
      if (!next)
	break;

      sp2 = add_sp_insn_p (insns);
      if (sp2)
	{
	  /* Only a single, exactly-cancelling adjustment is allowed.  */
	  if (end_frame_insn)
	    return;
	  end_frame_insn = insns;
	  if (sp2 != -sp_adjust)
	    return;
	}
      else if (mep_mentioned_p (insns, sp, 0))
	/* Any other use of $sp means the frame is needed.  */
	return;
      else if (CALL_P (insns))
	/* Calls need the frame too.  */
	return;

      insns = next;
    }

  if (end_frame_insn)
    {
      delete_insn (start_frame_insn);
      delete_insn (end_frame_insn);
    }
}
/* Implement TARGET_MACHINE_DEPENDENT_REORG: run the MeP-specific
   reorg passes in dependency order.  */
static void
mep_reorg (void)
{
  rtx insns = get_insns ();

  /* Combine adjacent sp/register adjustments before anything else.  */
  mep_reorg_addcombine (insns);
#if EXPERIMENTAL_REGMOVE_REORG
  /* VLIW packing has been done already, so we can't just delete things.  */
  if (!mep_vliw_function_p (cfun->decl))
    mep_reorg_regmove (insns);
#endif
  mep_jmp_return_reorg (insns);
  mep_bundle_insns (insns);
  mep_reorg_repeat (insns);
  /* erepeat uses the RPB/RPE/RPC registers, so it is only safe when
     profiling is off and, in interrupt handlers, when RPB is saved.  */
  if (optimize
      && !profile_flag
      && !profile_arc_flag
      && TARGET_OPT_REPEAT
      && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
    mep_reorg_erepeat (insns);

  /* This may delete *insns so make sure it's last.  */
  mep_reorg_noframe (insns);
}
5875 /*----------------------------------------------------------------------*/
5876 /* Builtins */
5877 /*----------------------------------------------------------------------*/
/* Element X gives the index into cgen_insns[] of the most general
   implementation of intrinsic X.  Unimplemented intrinsics are
   mapped to -1.  */
int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

/* Element X gives the index of another instruction that is mapped to
   the same intrinsic as cgen_insns[X].  It is -1 when there is no other
   instruction.

   Things are set up so that mep_intrinsic_chain[X] < X.  */
static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

/* The bitmask for the current ISA.  The ISA masks are declared
   in mep-intrin.h.  */
unsigned int mep_selected_isa;

/* Maps a -mconfig= name to the ISA mask it selects.  */
struct mep_config {
  const char *config_name;
  unsigned int isa;
};

/* Table of known configurations, terminated by a null entry.  The
   real entries come from the generated COPROC_SELECTION_TABLE.  */
static struct mep_config mep_configs[] = {
#ifdef COPROC_SELECTION_TABLE
  COPROC_SELECTION_TABLE,
#endif
  { 0, 0 }
};
5907 /* Initialize the global intrinsics variables above. */
static void
mep_init_intrinsics (void)
{
  size_t i;

  /* Set MEP_SELECTED_ISA to the ISA flag for this configuration.
     Default to the first table entry when -mconfig= is absent or
     doesn't match.  */
  mep_selected_isa = mep_configs[0].isa;
  if (mep_config_string != 0)
    for (i = 0; mep_configs[i].config_name; i++)
      if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
	{
	  mep_selected_isa = mep_configs[i].isa;
	  break;
	}

  /* Assume all intrinsics are unavailable.  */
  for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
    mep_intrinsic_insn[i] = -1;

  /* Build up the global intrinsic tables.  Later table entries win,
     so mep_intrinsic_chain[i] records the previously-seen insn for
     the same intrinsic (hence chain[X] < X).  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].isas & mep_selected_isa) != 0)
      {
	mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
	mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
      }

  /* See whether we can directly move values between one coprocessor
     register and another.  */
  for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
    if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
      mep_have_copro_copro_moves_p = true;

  /* See whether we can directly move values between core and
     coprocessor registers.  */
  mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
				 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));

  /* NOTE(review): this unconditionally overrides the value computed
     just above, making that computation dead.  Presumably a deliberate
     workaround, but worth confirming before relying on the cmov1/cmov2
     check.  */
  mep_have_core_copro_moves_p = 1;
}
5949 /* Declare all available intrinsic functions. Called once only. */
/* Type nodes for the MeP builtin/intrinsic interface; created once by
   mep_init_builtins.  */
static tree cp_data_bus_int_type_node;  /* "cp_data_bus_int": long or long long.  */
static tree opaque_vector_type_node;    /* "cp_vector": opaque 8-byte vector.  */
static tree v8qi_type_node;
static tree v4hi_type_node;
static tree v2si_type_node;
static tree v8uqi_type_node;
static tree v4uhi_type_node;
static tree v2usi_type_node;
/* Map a cgen regnum operand type CR to the tree type used for the
   corresponding builtin argument/return value.  Unknown types map to
   void.  */
static tree
mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
{
  switch (cr)
    {
    case cgen_regnum_operand_type_POINTER:	return ptr_type_node;
    case cgen_regnum_operand_type_LONG:		return long_integer_type_node;
    case cgen_regnum_operand_type_ULONG:	return long_unsigned_type_node;
    case cgen_regnum_operand_type_SHORT:	return short_integer_type_node;
    case cgen_regnum_operand_type_USHORT:	return short_unsigned_type_node;
    case cgen_regnum_operand_type_CHAR:		return char_type_node;
    case cgen_regnum_operand_type_UCHAR:	return unsigned_char_type_node;
    case cgen_regnum_operand_type_SI:		return intSI_type_node;
    case cgen_regnum_operand_type_DI:		return intDI_type_node;
    case cgen_regnum_operand_type_VECTOR:	return opaque_vector_type_node;
    case cgen_regnum_operand_type_V8QI:		return v8qi_type_node;
    case cgen_regnum_operand_type_V4HI:		return v4hi_type_node;
    case cgen_regnum_operand_type_V2SI:		return v2si_type_node;
    case cgen_regnum_operand_type_V8UQI:	return v8uqi_type_node;
    case cgen_regnum_operand_type_V4UHI:	return v4uhi_type_node;
    case cgen_regnum_operand_type_V2USI:	return v2usi_type_node;
    case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
    default:
      return void_type_node;
    }
}
5987 static void
5988 mep_init_builtins (void)
5990 size_t i;
5992 if (TARGET_64BIT_CR_REGS)
5993 cp_data_bus_int_type_node = long_long_integer_type_node;
5994 else
5995 cp_data_bus_int_type_node = long_integer_type_node;
5997 opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
5998 v8qi_type_node = build_vector_type (intQI_type_node, 8);
5999 v4hi_type_node = build_vector_type (intHI_type_node, 4);
6000 v2si_type_node = build_vector_type (intSI_type_node, 2);
6001 v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
6002 v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
6003 v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);
6005 (*lang_hooks.decls.pushdecl)
6006 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_data_bus_int"),
6007 cp_data_bus_int_type_node));
6009 (*lang_hooks.decls.pushdecl)
6010 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_vector"),
6011 opaque_vector_type_node));
6013 (*lang_hooks.decls.pushdecl)
6014 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8qi"),
6015 v8qi_type_node));
6016 (*lang_hooks.decls.pushdecl)
6017 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4hi"),
6018 v4hi_type_node));
6019 (*lang_hooks.decls.pushdecl)
6020 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2si"),
6021 v2si_type_node));
6023 (*lang_hooks.decls.pushdecl)
6024 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8uqi"),
6025 v8uqi_type_node));
6026 (*lang_hooks.decls.pushdecl)
6027 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4uhi"),
6028 v4uhi_type_node));
6029 (*lang_hooks.decls.pushdecl)
6030 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2usi"),
6031 v2usi_type_node));
6033 /* Intrinsics like mep_cadd3 are implemented with two groups of
6034 instructions, one which uses UNSPECs and one which uses a specific
6035 rtl code such as PLUS. Instructions in the latter group belong
6036 to GROUP_KNOWN_CODE.
6038 In such cases, the intrinsic will have two entries in the global
6039 tables above. The unspec form is accessed using builtin functions
6040 while the specific form is accessed using the mep_* enum in
6041 mep-intrin.h.
6043 The idea is that __cop arithmetic and builtin functions have
6044 different optimization requirements. If mep_cadd3() appears in
6045 the source code, the user will surely except gcc to use cadd3
6046 rather than a work-alike such as add3. However, if the user
6047 just writes "a + b", where a or b are __cop variables, it is
6048 reasonable for gcc to choose a core instruction rather than
6049 cadd3 if it believes that is more optimal. */
6050 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
6051 if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
6052 && mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
6054 tree ret_type = void_type_node;
6055 tree bi_type;
6057 if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
6058 continue;
6060 if (cgen_insns[i].cret_p)
6061 ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);
6063 bi_type = build_function_type (ret_type, 0);
6064 add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
6065 bi_type,
6066 cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
6070 /* Report the unavailablity of the given intrinsic. */
6072 #if 1
6073 static void
6074 mep_intrinsic_unavailable (int intrinsic)
6076 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6078 if (already_reported_p[intrinsic])
6079 return;
6081 if (mep_intrinsic_insn[intrinsic] < 0)
6082 error ("coprocessor intrinsic %qs is not available in this configuration",
6083 cgen_intrinsics[intrinsic]);
6084 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6085 error ("%qs is not available in VLIW functions",
6086 cgen_intrinsics[intrinsic]);
6087 else
6088 error ("%qs is not available in non-VLIW functions",
6089 cgen_intrinsics[intrinsic]);
6091 already_reported_p[intrinsic] = 1;
6093 #endif
6096 /* See if any implementation of INTRINSIC is available to the
6097 current function. If so, store the most general implementation
6098 in *INSN_PTR and return true. Return false otherwise. */
6100 static bool
6101 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6103 int i;
6105 i = mep_intrinsic_insn[intrinsic];
6106 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6107 i = mep_intrinsic_chain[i];
6109 if (i >= 0)
6111 *insn_ptr = &cgen_insns[i];
6112 return true;
6114 return false;
6118 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6119 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6120 try using a work-alike instead. In this case, the returned insn
6121 may have three operands rather than two. */
6123 static bool
6124 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6126 size_t i;
6128 if (intrinsic == mep_cmov)
6130 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6131 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6132 return true;
6133 return false;
6135 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6139 /* If ARG is a register operand that is the same size as MODE, convert it
6140 to MODE using a subreg. Otherwise return ARG as-is. */
6142 static rtx
6143 mep_convert_arg (enum machine_mode mode, rtx arg)
6145 if (GET_MODE (arg) != mode
6146 && register_operand (arg, VOIDmode)
6147 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6148 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6149 return arg;
6153 /* Apply regnum conversions to ARG using the description given by REGNUM.
6154 Return the new argument on success and null on failure. */
6156 static rtx
6157 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6159 if (regnum->count == 0)
6160 return arg;
6162 if (GET_CODE (arg) != CONST_INT
6163 || INTVAL (arg) < 0
6164 || INTVAL (arg) >= regnum->count)
6165 return 0;
6167 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6171 /* Try to make intrinsic argument ARG match the given operand.
6172 UNSIGNED_P is true if the argument has an unsigned type. */
6174 static rtx
6175 mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
6176 int unsigned_p)
6178 if (GET_CODE (arg) == CONST_INT)
6180 /* CONST_INTs can only be bound to integer operands. */
6181 if (GET_MODE_CLASS (operand->mode) != MODE_INT)
6182 return 0;
6184 else if (GET_CODE (arg) == CONST_DOUBLE)
6185 /* These hold vector constants. */;
6186 else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
6188 /* If the argument is a different size from what's expected, we must
6189 have a value in the right mode class in order to convert it. */
6190 if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
6191 return 0;
6193 /* If the operand is an rvalue, promote or demote it to match the
6194 operand's size. This might not need extra instructions when
6195 ARG is a register value. */
6196 if (operand->constraint[0] != '=')
6197 arg = convert_to_mode (operand->mode, arg, unsigned_p);
6200 /* If the operand is an lvalue, bind the operand to a new register.
6201 The caller will copy this value into ARG after the main
6202 instruction. By doing this always, we produce slightly more
6203 optimal code. */
6204 /* But not for control registers. */
6205 if (operand->constraint[0] == '='
6206 && (! REG_P (arg)
6207 || ! (CCR_REGNO_P (REGNO (arg)) || CR_REGNO_P (REGNO (arg)))
6209 return gen_reg_rtx (operand->mode);
6211 /* Try simple mode punning. */
6212 arg = mep_convert_arg (operand->mode, arg);
6213 if (operand->predicate (arg, operand->mode))
6214 return arg;
6216 /* See if forcing the argument into a register will make it match. */
6217 if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
6218 arg = force_reg (operand->mode, arg);
6219 else
6220 arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
6221 if (operand->predicate (arg, operand->mode))
6222 return arg;
6224 return 0;
6228 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6229 function FNNAME. OPERAND describes the operand to which ARGNUM
6230 is mapped. */
6232 static void
6233 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6234 int argnum, tree fnname)
6236 size_t i;
6238 if (GET_CODE (arg) == CONST_INT)
6239 for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6240 if (operand->predicate == cgen_immediate_predicates[i].predicate)
6242 const struct cgen_immediate_predicate *predicate;
6243 HOST_WIDE_INT argval;
6245 predicate = &cgen_immediate_predicates[i];
6246 argval = INTVAL (arg);
6247 if (argval < predicate->lower || argval >= predicate->upper)
6248 error ("argument %d of %qE must be in the range %d...%d",
6249 argnum, fnname, predicate->lower, predicate->upper - 1);
6250 else
6251 error ("argument %d of %qE must be a multiple of %d",
6252 argnum, fnname, predicate->align);
6253 return;
6256 error ("incompatible type for argument %d of %qE", argnum, fnname);
6259 static rtx
6260 mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6261 rtx subtarget ATTRIBUTE_UNUSED,
6262 enum machine_mode mode ATTRIBUTE_UNUSED,
6263 int ignore ATTRIBUTE_UNUSED)
6265 rtx pat, op[10], arg[10];
6266 unsigned int a;
6267 int opindex, unsigned_p[10];
6268 tree fndecl, args;
6269 unsigned int n_args;
6270 tree fnname;
6271 const struct cgen_insn *cgen_insn;
6272 const struct insn_data *idata;
6273 int first_arg = 0;
6274 int return_type = void_type_node;
6275 int builtin_n_args;
6277 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6278 fnname = DECL_NAME (fndecl);
6280 /* Find out which instruction we should emit. Note that some coprocessor
6281 intrinsics may only be available in VLIW mode, or only in normal mode. */
6282 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6284 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
6285 return error_mark_node;
6287 idata = &insn_data[cgen_insn->icode];
6289 builtin_n_args = cgen_insn->num_args;
6291 if (cgen_insn->cret_p)
6293 if (cgen_insn->cret_p > 1)
6294 builtin_n_args ++;
6295 first_arg = 1;
6296 return_type = mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
6297 builtin_n_args --;
6300 /* Evaluate each argument. */
6301 n_args = call_expr_nargs (exp);
6303 if (n_args < builtin_n_args)
6305 error ("too few arguments to %qE", fnname);
6306 return error_mark_node;
6308 if (n_args > builtin_n_args)
6310 error ("too many arguments to %qE", fnname);
6311 return error_mark_node;
6314 for (a = first_arg; a < builtin_n_args+first_arg; a++)
6316 tree value;
6318 args = CALL_EXPR_ARG (exp, a-first_arg);
6320 value = args;
6322 #if 0
6323 if (cgen_insn->regnums[a].reference_p)
6325 if (TREE_CODE (value) != ADDR_EXPR)
6327 debug_tree(value);
6328 error ("argument %d of %qE must be an address", a+1, fnname);
6329 return error_mark_node;
6331 value = TREE_OPERAND (value, 0);
6333 #endif
6335 /* If the argument has been promoted to int, get the unpromoted
6336 value. This is necessary when sub-int memory values are bound
6337 to reference parameters. */
6338 if (TREE_CODE (value) == NOP_EXPR
6339 && TREE_TYPE (value) == integer_type_node
6340 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6341 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6342 < TYPE_PRECISION (TREE_TYPE (value))))
6343 value = TREE_OPERAND (value, 0);
6345 /* If the argument has been promoted to double, get the unpromoted
6346 SFmode value. This is necessary for FMAX support, for example. */
6347 if (TREE_CODE (value) == NOP_EXPR
6348 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6349 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6350 && TYPE_MODE (TREE_TYPE (value)) == DFmode
6351 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6352 value = TREE_OPERAND (value, 0);
6354 unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6355 arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
6356 arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6357 if (cgen_insn->regnums[a].reference_p)
6359 tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6360 enum machine_mode pointed_mode = TYPE_MODE (pointed_to);
6362 arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
6364 if (arg[a] == 0)
6366 error ("argument %d of %qE must be in the range %d...%d",
6367 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
6368 return error_mark_node;
6372 for (a=0; a<first_arg; a++)
6374 if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6375 arg[a] = target;
6376 else
6377 arg[a] = gen_reg_rtx (idata->operand[0].mode);
6380 /* Convert the arguments into a form suitable for the intrinsic.
6381 Report an error if this isn't possible. */
6382 for (opindex = 0; opindex < idata->n_operands; opindex++)
6384 a = cgen_insn->op_mapping[opindex];
6385 op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6386 arg[a], unsigned_p[a]);
6387 if (op[opindex] == 0)
6389 mep_incompatible_arg (&idata->operand[opindex],
6390 arg[a], a + 1 - first_arg, fnname);
6391 return error_mark_node;
6395 /* Emit the instruction. */
6396 pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6397 op[5], op[6], op[7], op[8], op[9]);
6399 if (GET_CODE (pat) == SET
6400 && GET_CODE (SET_DEST (pat)) == PC
6401 && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6402 emit_jump_insn (pat);
6403 else
6404 emit_insn (pat);
6406 /* Copy lvalues back to their final locations. */
6407 for (opindex = 0; opindex < idata->n_operands; opindex++)
6408 if (idata->operand[opindex].constraint[0] == '=')
6410 a = cgen_insn->op_mapping[opindex];
6411 if (a >= first_arg)
6413 if (GET_MODE_CLASS (GET_MODE (arg[a]))
6414 != GET_MODE_CLASS (GET_MODE (op[opindex])))
6415 emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6416 op[opindex]));
6417 else
6419 /* First convert the operand to the right mode, then copy it
6420 into the destination. Doing the conversion as a separate
6421 step (rather than using convert_move) means that we can
6422 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6423 refer to the same register. */
6424 op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6425 op[opindex], unsigned_p[a]);
6426 if (!rtx_equal_p (arg[a], op[opindex]))
6427 emit_move_insn (arg[a], op[opindex]);
6432 if (first_arg > 0 && target && target != op[0])
6434 emit_move_insn (target, op[0]);
6437 return target;
6440 static bool
6441 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
6443 return false;
6446 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6447 a global register. */
6449 static int
6450 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
6452 int regno;
6453 rtx x = *loc;
6455 if (! x)
6456 return 0;
6458 switch (GET_CODE (x))
6460 case SUBREG:
6461 if (REG_P (SUBREG_REG (x)))
6463 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6464 && global_regs[subreg_regno (x)])
6465 return 1;
6466 return 0;
6468 break;
6470 case REG:
6471 regno = REGNO (x);
6472 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6473 return 1;
6474 return 0;
6476 case SCRATCH:
6477 case PC:
6478 case CC0:
6479 case CONST_INT:
6480 case CONST_DOUBLE:
6481 case CONST:
6482 case LABEL_REF:
6483 return 0;
6485 case CALL:
6486 /* A non-constant call might use a global register. */
6487 return 1;
6489 default:
6490 break;
6493 return 0;
6496 /* Returns nonzero if X mentions a global register. */
6498 static int
6499 global_reg_mentioned_p (rtx x)
6501 if (INSN_P (x))
6503 if (CALL_P (x))
6505 if (! RTL_CONST_OR_PURE_CALL_P (x))
6506 return 1;
6507 x = CALL_INSN_FUNCTION_USAGE (x);
6508 if (x == 0)
6509 return 0;
6511 else
6512 x = PATTERN (x);
6515 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6517 /* Scheduling hooks for VLIW mode.
6519 Conceptually this is very simple: we have a two-pack architecture
6520 that takes one core insn and one coprocessor insn to make up either
6521 a 32- or 64-bit instruction word (depending on the option bit set in
6522 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6523 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6524 and one 48-bit cop insn or two 32-bit core/cop insns.
6526 In practice, instruction selection will be a bear. Consider in
6527 VL64 mode the following insns
6529 add $1, 1
6530 cmov $cr0, $0
6532 these cannot pack, since the add is a 16-bit core insn and cmov
6533 is a 32-bit cop insn. However,
6535 add3 $1, $1, 1
6536 cmov $cr0, $0
6538 packs just fine. For good VLIW code generation in VL64 mode, we
6539 will have to have 32-bit alternatives for many of the common core
6540 insns. Not implemented. */
6542 static int
6543 mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
6545 int cost_specified;
6547 if (REG_NOTE_KIND (link) != 0)
6549 /* See whether INSN and DEP_INSN are intrinsics that set the same
6550 hard register. If so, it is more important to free up DEP_INSN
6551 than it is to free up INSN.
6553 Note that intrinsics like mep_mulr are handled differently from
6554 the equivalent mep.md patterns. In mep.md, if we don't care
6555 about the value of $lo and $hi, the pattern will just clobber
6556 the registers, not set them. Since clobbers don't count as
6557 output dependencies, it is often possible to reorder two mulrs,
6558 even after reload.
6560 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6561 so any pair of mep_mulr()s will be inter-dependent. We should
6562 therefore give the first mep_mulr() a higher priority. */
6563 if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
6564 && global_reg_mentioned_p (PATTERN (insn))
6565 && global_reg_mentioned_p (PATTERN (dep_insn)))
6566 return 1;
6568 /* If the dependence is an anti or output dependence, assume it
6569 has no cost. */
6570 return 0;
6573 /* If we can't recognize the insns, we can't really do anything. */
6574 if (recog_memoized (dep_insn) < 0)
6575 return cost;
6577 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6578 attribute instead. */
6579 if (!TARGET_H1)
6581 cost_specified = get_attr_latency (dep_insn);
6582 if (cost_specified != 0)
6583 return cost_specified;
6586 return cost;
6589 /* ??? We don't properly compute the length of a load/store insn,
6590 taking into account the addressing mode. */
6592 static int
6593 mep_issue_rate (void)
6595 return TARGET_IVC2 ? 3 : 2;
6598 /* Return true if function DECL was declared with the vliw attribute. */
6600 bool
6601 mep_vliw_function_p (tree decl)
6603 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6606 static rtx
6607 mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6609 int i;
6611 for (i = nready - 1; i >= 0; --i)
6613 rtx insn = ready[i];
6614 if (recog_memoized (insn) >= 0
6615 && get_attr_slot (insn) == slot
6616 && get_attr_length (insn) == length)
6617 return insn;
6620 return NULL_RTX;
6623 static void
6624 mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6626 int i;
6628 for (i = 0; i < nready; ++i)
6629 if (ready[i] == insn)
6631 for (; i < nready - 1; ++i)
6632 ready[i] = ready[i + 1];
6633 ready[i] = insn;
6634 return;
6637 gcc_unreachable ();
6640 static void
6641 mep_print_sched_insn (FILE *dump, rtx insn)
6643 const char *slots = "none";
6644 const char *name = NULL;
6645 int code;
6646 char buf[30];
6648 if (GET_CODE (PATTERN (insn)) == SET
6649 || GET_CODE (PATTERN (insn)) == PARALLEL)
6651 switch (get_attr_slots (insn))
6653 case SLOTS_CORE: slots = "core"; break;
6654 case SLOTS_C3: slots = "c3"; break;
6655 case SLOTS_P0: slots = "p0"; break;
6656 case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6657 case SLOTS_P0_P1: slots = "p0,p1"; break;
6658 case SLOTS_P0S: slots = "p0s"; break;
6659 case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6660 case SLOTS_P1: slots = "p1"; break;
6661 default:
6662 sprintf(buf, "%d", get_attr_slots (insn));
6663 slots = buf;
6664 break;
6667 if (GET_CODE (PATTERN (insn)) == USE)
6668 slots = "use";
6670 code = INSN_CODE (insn);
6671 if (code >= 0)
6672 name = get_insn_name (code);
6673 if (!name)
6674 name = "{unknown}";
6676 fprintf (dump,
6677 "insn %4d %4d %8s %s\n",
6678 code,
6679 INSN_UID (insn),
6680 name,
6681 slots);
6684 static int
6685 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6686 int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
6687 int *pnready, int clock ATTRIBUTE_UNUSED)
6689 int nready = *pnready;
6690 rtx core_insn, cop_insn;
6691 int i;
6693 if (dump && sched_verbose > 1)
6695 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6696 for (i=0; i<nready; i++)
6697 mep_print_sched_insn (dump, ready[i]);
6698 fprintf (dump, "\n");
6701 if (!mep_vliw_function_p (cfun->decl))
6702 return 1;
6703 if (nready < 2)
6704 return 1;
6706 /* IVC2 uses a DFA to determine what's ready and what's not. */
6707 if (TARGET_IVC2)
6708 return nready;
6710 /* We can issue either a core or coprocessor instruction.
6711 Look for a matched pair of insns to reorder. If we don't
6712 find any, don't second-guess the scheduler's priorities. */
6714 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6715 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6716 TARGET_OPT_VL64 ? 6 : 2)))
6718 else if (TARGET_OPT_VL64
6719 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6720 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6722 else
6723 /* We didn't find a pair. Issue the single insn at the head
6724 of the ready list. */
6725 return 1;
6727 /* Reorder the two insns first. */
6728 mep_move_ready_insn (ready, nready, core_insn);
6729 mep_move_ready_insn (ready, nready - 1, cop_insn);
6730 return 2;
6733 /* A for_each_rtx callback. Return true if *X is a register that is
6734 set by insn PREV. */
6736 static int
6737 mep_store_find_set (rtx *x, void *prev)
6739 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6742 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6743 not the containing insn. */
6745 static bool
6746 mep_store_data_bypass_1 (rtx prev, rtx pat)
6748 /* Cope with intrinsics like swcpa. */
6749 if (GET_CODE (pat) == PARALLEL)
6751 int i;
6753 for (i = 0; i < XVECLEN (pat, 0); i++)
6754 if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6755 return true;
6757 return false;
6760 /* Check for some sort of store. */
6761 if (GET_CODE (pat) != SET
6762 || GET_CODE (SET_DEST (pat)) != MEM)
6763 return false;
6765 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6766 The first operand to the unspec is the store data and the other operands
6767 are used to calculate the address. */
6768 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6770 rtx src;
6771 int i;
6773 src = SET_SRC (pat);
6774 for (i = 1; i < XVECLEN (src, 0); i++)
6775 if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6776 return false;
6778 return true;
6781 /* Otherwise just check that PREV doesn't modify any register mentioned
6782 in the memory destination. */
6783 return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6786 /* Return true if INSN is a store instruction and if the store address
6787 has no true dependence on PREV. */
6789 bool
6790 mep_store_data_bypass_p (rtx prev, rtx insn)
6792 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6795 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6796 is a register other than LO or HI and if PREV sets *X. */
6798 static int
6799 mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6801 return (REG_P (*x)
6802 && REGNO (*x) != LO_REGNO
6803 && REGNO (*x) != HI_REGNO
6804 && reg_set_p (*x, (const_rtx) prev));
6807 /* Return true if, apart from HI/LO, there are no true dependencies
6808 between multiplication instructions PREV and INSN. */
6810 bool
6811 mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6813 rtx pat;
6815 pat = PATTERN (insn);
6816 if (GET_CODE (pat) == PARALLEL)
6817 pat = XVECEXP (pat, 0, 0);
6818 return (GET_CODE (pat) == SET
6819 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6822 /* Return true if INSN is an ldc instruction that issues to the
6823 MeP-h1 integer pipeline. This is true for instructions that
6824 read from PSW, LP, SAR, HI and LO. */
6826 bool
6827 mep_ipipe_ldc_p (rtx insn)
6829 rtx pat, src;
6831 pat = PATTERN (insn);
6833 /* Cope with instrinsics that set both a hard register and its shadow.
6834 The set of the hard register comes first. */
6835 if (GET_CODE (pat) == PARALLEL)
6836 pat = XVECEXP (pat, 0, 0);
6838 if (GET_CODE (pat) == SET)
6840 src = SET_SRC (pat);
6842 /* Cope with intrinsics. The first operand to the unspec is
6843 the source register. */
6844 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6845 src = XVECEXP (src, 0, 0);
6847 if (REG_P (src))
6848 switch (REGNO (src))
6850 case PSW_REGNO:
6851 case LP_REGNO:
6852 case SAR_REGNO:
6853 case HI_REGNO:
6854 case LO_REGNO:
6855 return true;
6858 return false;
6861 /* Create a VLIW bundle from core instruction CORE and coprocessor
6862 instruction COP. COP always satisfies INSN_P, but CORE can be
6863 either a new pattern or an existing instruction.
6865 Emit the bundle in place of COP and return it. */
6867 static rtx
6868 mep_make_bundle (rtx core, rtx cop)
6870 rtx insn;
6872 /* If CORE is an existing instruction, remove it, otherwise put
6873 the new pattern in an INSN harness. */
6874 if (INSN_P (core))
6875 remove_insn (core);
6876 else
6877 core = make_insn_raw (core);
6879 /* Generate the bundle sequence and replace COP with it. */
6880 insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
6881 insn = emit_insn_after (insn, cop);
6882 remove_insn (cop);
6884 /* Set up the links of the insns inside the SEQUENCE. */
6885 PREV_INSN (core) = PREV_INSN (insn);
6886 NEXT_INSN (core) = cop;
6887 PREV_INSN (cop) = core;
6888 NEXT_INSN (cop) = NEXT_INSN (insn);
6890 /* Set the VLIW flag for the coprocessor instruction. */
6891 PUT_MODE (core, VOIDmode);
6892 PUT_MODE (cop, BImode);
6894 /* Derive a location for the bundle. Individual instructions cannot
6895 have their own location because there can be no assembler labels
6896 between CORE and COP. */
6897 INSN_LOCATOR (insn) = INSN_LOCATOR (INSN_LOCATOR (core) ? core : cop);
6898 INSN_LOCATOR (core) = 0;
6899 INSN_LOCATOR (cop) = 0;
6901 return insn;
6904 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6906 static void
6907 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6909 rtx * pinsn = (rtx *) data;
6911 if (*pinsn && reg_mentioned_p (x, *pinsn))
6912 *pinsn = NULL_RTX;
6915 /* Return true if anything in insn X is (anti,output,true) dependent on
6916 anything in insn Y. */
6918 static int
6919 mep_insn_dependent_p (rtx x, rtx y)
6921 rtx tmp;
6923 gcc_assert (INSN_P (x));
6924 gcc_assert (INSN_P (y));
6926 tmp = PATTERN (y);
6927 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6928 if (tmp == NULL_RTX)
6929 return 1;
6931 tmp = PATTERN (x);
6932 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6933 if (tmp == NULL_RTX)
6934 return 1;
6936 return 0;
6939 static int
6940 core_insn_p (rtx insn)
6942 if (GET_CODE (PATTERN (insn)) == USE)
6943 return 0;
6944 if (get_attr_slot (insn) == SLOT_CORE)
6945 return 1;
6946 return 0;
6949 /* Mark coprocessor instructions that can be bundled together with
6950 the immediately preceeding core instruction. This is later used
6951 to emit the "+" that tells the assembler to create a VLIW insn.
6953 For unbundled insns, the assembler will automatically add coprocessor
6954 nops, and 16-bit core nops. Due to an apparent oversight in the
6955 spec, the assembler will _not_ automatically add 32-bit core nops,
6956 so we have to emit those here.
6958 Called from mep_insn_reorg. */
6960 static void
6961 mep_bundle_insns (rtx insns)
6963 rtx insn, last = NULL_RTX, first = NULL_RTX;
6964 int saw_scheduling = 0;
6966 /* Only do bundling if we're in vliw mode. */
6967 if (!mep_vliw_function_p (cfun->decl))
6968 return;
6970 /* The first insn in a bundle are TImode, the remainder are
6971 VOIDmode. After this function, the first has VOIDmode and the
6972 rest have BImode. */
6974 /* Note: this doesn't appear to be true for JUMP_INSNs. */
6976 /* First, move any NOTEs that are within a bundle, to the beginning
6977 of the bundle. */
6978 for (insn = insns; insn ; insn = NEXT_INSN (insn))
6980 if (NOTE_P (insn) && first)
6981 /* Don't clear FIRST. */;
6983 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
6984 first = insn;
6986 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
6988 rtx note, prev;
6990 /* INSN is part of a bundle; FIRST is the first insn in that
6991 bundle. Move all intervening notes out of the bundle.
6992 In addition, since the debug pass may insert a label
6993 whenever the current line changes, set the location info
6994 for INSN to match FIRST. */
6996 INSN_LOCATOR (insn) = INSN_LOCATOR (first);
6998 note = PREV_INSN (insn);
6999 while (note && note != first)
7001 prev = PREV_INSN (note);
7003 if (NOTE_P (note))
7005 /* Remove NOTE from here... */
7006 PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
7007 NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
7008 /* ...and put it in here. */
7009 NEXT_INSN (note) = first;
7010 PREV_INSN (note) = PREV_INSN (first);
7011 NEXT_INSN (PREV_INSN (note)) = note;
7012 PREV_INSN (NEXT_INSN (note)) = note;
7015 note = prev;
7019 else if (!NONJUMP_INSN_P (insn))
7020 first = 0;
7023 /* Now fix up the bundles. */
7024 for (insn = insns; insn ; insn = NEXT_INSN (insn))
7026 if (NOTE_P (insn))
7027 continue;
7029 if (!NONJUMP_INSN_P (insn))
7031 last = 0;
7032 continue;
7035 /* If we're not optimizing enough, there won't be scheduling
7036 info. We detect that here. */
7037 if (GET_MODE (insn) == TImode)
7038 saw_scheduling = 1;
7039 if (!saw_scheduling)
7040 continue;
7042 if (TARGET_IVC2)
7044 rtx core_insn = NULL_RTX;
7046 /* IVC2 slots are scheduled by DFA, so we just accept
7047 whatever the scheduler gives us. However, we must make
7048 sure the core insn (if any) is the first in the bundle.
7049 The IVC2 assembler can insert whatever NOPs are needed,
7050 and allows a COP insn to be first. */
7052 if (NONJUMP_INSN_P (insn)
7053 && GET_CODE (PATTERN (insn)) != USE
7054 && GET_MODE (insn) == TImode)
7056 for (last = insn;
7057 NEXT_INSN (last)
7058 && GET_MODE (NEXT_INSN (last)) == VOIDmode
7059 && NONJUMP_INSN_P (NEXT_INSN (last));
7060 last = NEXT_INSN (last))
7062 if (core_insn_p (last))
7063 core_insn = last;
7065 if (core_insn_p (last))
7066 core_insn = last;
7068 if (core_insn && core_insn != insn)
7070 /* Swap core insn to first in the bundle. */
7072 /* Remove core insn. */
7073 if (PREV_INSN (core_insn))
7074 NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
7075 if (NEXT_INSN (core_insn))
7076 PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);
7078 /* Re-insert core insn. */
7079 PREV_INSN (core_insn) = PREV_INSN (insn);
7080 NEXT_INSN (core_insn) = insn;
7082 if (PREV_INSN (core_insn))
7083 NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
7084 PREV_INSN (insn) = core_insn;
7086 PUT_MODE (core_insn, TImode);
7087 PUT_MODE (insn, VOIDmode);
7091 /* The first insn has TImode, the rest have VOIDmode */
7092 if (GET_MODE (insn) == TImode)
7093 PUT_MODE (insn, VOIDmode);
7094 else
7095 PUT_MODE (insn, BImode);
7096 continue;
7099 PUT_MODE (insn, VOIDmode);
7100 if (recog_memoized (insn) >= 0
7101 && get_attr_slot (insn) == SLOT_COP)
7103 if (GET_CODE (insn) == JUMP_INSN
7104 || ! last
7105 || recog_memoized (last) < 0
7106 || get_attr_slot (last) != SLOT_CORE
7107 || (get_attr_length (insn)
7108 != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
7109 || mep_insn_dependent_p (insn, last))
7111 switch (get_attr_length (insn))
7113 case 8:
7114 break;
7115 case 6:
7116 insn = mep_make_bundle (gen_nop (), insn);
7117 break;
7118 case 4:
7119 if (TARGET_OPT_VL64)
7120 insn = mep_make_bundle (gen_nop32 (), insn);
7121 break;
7122 case 2:
7123 if (TARGET_OPT_VL64)
7124 error ("2 byte cop instructions are"
7125 " not allowed in 64-bit VLIW mode");
7126 else
7127 insn = mep_make_bundle (gen_nop (), insn);
7128 break;
7129 default:
7130 error ("unexpected %d byte cop instruction",
7131 get_attr_length (insn));
7132 break;
7135 else
7136 insn = mep_make_bundle (last, insn);
7139 last = insn;
7144 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7145 Return true on success. This function can fail if the intrinsic
7146 is unavailable or if the operands don't satisfy their predicates. */
7148 bool
7149 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7151 const struct cgen_insn *cgen_insn;
7152 const struct insn_data *idata;
7153 rtx newop[10];
7154 int i;
7156 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7157 return false;
7159 idata = &insn_data[cgen_insn->icode];
7160 for (i = 0; i < idata->n_operands; i++)
7162 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7163 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7164 return false;
7167 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7168 newop[3], newop[4], newop[5],
7169 newop[6], newop[7], newop[8]));
7171 return true;
7175 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7176 OPERANDS[0]. Report an error if the instruction could not
7177 be synthesized. OPERANDS[1] is a register_operand. For sign
7178 and zero extensions, it may be smaller than SImode. */
7180 bool
7181 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
7182 rtx * operands ATTRIBUTE_UNUSED)
7184 return false;
7188 /* Likewise, but apply a binary operation to OPERANDS[1] and
7189 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7190 can be a general_operand.
7192 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7193 third operand. REG and REG3 take register operands only. */
7195 bool
7196 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
7197 int ATTRIBUTE_UNUSED immediate3,
7198 int ATTRIBUTE_UNUSED reg,
7199 int ATTRIBUTE_UNUSED reg3,
7200 rtx * operands ATTRIBUTE_UNUSED)
7202 return false;
7205 static bool
7206 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total, bool ATTRIBUTE_UNUSED speed_t)
7208 switch (code)
7210 case CONST_INT:
7211 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7212 *total = 0;
7213 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7214 *total = 1;
7215 else
7216 *total = 3;
7217 return true;
7219 case SYMBOL_REF:
7220 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7221 return true;
7223 case MULT:
7224 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7225 ? COSTS_N_INSNS (3)
7226 : COSTS_N_INSNS (2));
7227 return true;
7229 return false;
7232 static int
7233 mep_address_cost (rtx addr ATTRIBUTE_UNUSED, bool ATTRIBUTE_UNUSED speed_p)
7235 return 1;
7238 static bool
7239 mep_handle_option (size_t code,
7240 const char *arg ATTRIBUTE_UNUSED,
7241 int value ATTRIBUTE_UNUSED)
7243 int i;
7245 switch (code)
7247 case OPT_mall_opts:
7248 target_flags |= MEP_ALL_OPTS;
7249 break;
7251 case OPT_mno_opts:
7252 target_flags &= ~ MEP_ALL_OPTS;
7253 break;
7255 case OPT_mcop64:
7256 target_flags |= MASK_COP;
7257 target_flags |= MASK_64BIT_CR_REGS;
7258 break;
7260 case OPT_mtiny_:
7261 option_mtiny_specified = 1;
7263 case OPT_mivc2:
7264 target_flags |= MASK_COP;
7265 target_flags |= MASK_64BIT_CR_REGS;
7266 target_flags |= MASK_VLIW;
7267 target_flags |= MASK_OPT_VL64;
7268 target_flags |= MASK_IVC2;
7270 for (i=0; i<32; i++)
7271 fixed_regs[i+48] = 0;
7272 for (i=0; i<32; i++)
7273 call_used_regs[i+48] = 1;
7274 for (i=6; i<8; i++)
7275 call_used_regs[i+48] = 0;
7277 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
7278 RN (0, "$csar0");
7279 RN (1, "$cc");
7280 RN (4, "$cofr0");
7281 RN (5, "$cofr1");
7282 RN (6, "$cofa0");
7283 RN (7, "$cofa1");
7284 RN (15, "$csar1");
7286 RN (16, "$acc0_0");
7287 RN (17, "$acc0_1");
7288 RN (18, "$acc0_2");
7289 RN (19, "$acc0_3");
7290 RN (20, "$acc0_4");
7291 RN (21, "$acc0_5");
7292 RN (22, "$acc0_6");
7293 RN (23, "$acc0_7");
7295 RN (24, "$acc1_0");
7296 RN (25, "$acc1_1");
7297 RN (26, "$acc1_2");
7298 RN (27, "$acc1_3");
7299 RN (28, "$acc1_4");
7300 RN (29, "$acc1_5");
7301 RN (30, "$acc1_6");
7302 RN (31, "$acc1_7");
7303 #undef RN
7305 break;
7307 default:
7308 break;
7310 return TRUE;
7313 static void
7314 mep_asm_init_sections (void)
7316 based_section
7317 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7318 "\t.section .based,\"aw\"");
7320 tinybss_section
7321 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7322 "\t.section .sbss,\"aw\"");
7324 sdata_section
7325 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7326 "\t.section .sdata,\"aw\",@progbits");
7328 far_section
7329 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7330 "\t.section .far,\"aw\"");
7332 farbss_section
7333 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7334 "\t.section .farbss,\"aw\"");
7336 frodata_section
7337 = get_unnamed_section (0, output_section_asm_op,
7338 "\t.section .frodata,\"a\"");
7340 srodata_section
7341 = get_unnamed_section (0, output_section_asm_op,
7342 "\t.section .srodata,\"a\"");
7346 #include "gt-mep.h"