Merge from mainline (154736:156693)
[official-gcc/graphite-test-results.git] / gcc / config / mep / mep.c
blob54d21c8f1a003f547cb69ec281d83e18e89e9307
1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "recog.h"
38 #include "obstack.h"
39 #include "tree.h"
40 #include "expr.h"
41 #include "except.h"
42 #include "function.h"
43 #include "optabs.h"
44 #include "reload.h"
45 #include "tm_p.h"
46 #include "ggc.h"
47 #include "toplev.h"
48 #include "integrate.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
52 #include "df.h"
54 /* Structure of this file:
56 + Command Line Option Support
57 + Pattern support - constraints, predicates, expanders
58 + Reload Support
59 + Costs
60 + Functions to save and restore machine-specific function data.
61 + Frame/Epilog/Prolog Related
62 + Operand Printing
63 + Function args in registers
64 + Handle pipeline hazards
65 + Handle attributes
66 + Trampolines
67 + Machine-dependent Reorg
68 + Builtins. */
70 /* Symbol encodings:
72 Symbols are encoded as @ <char> . <name> where <char> is one of these:
74 b - based
75 t - tiny
76 n - near
77 f - far
78 i - io, near
79 I - io, far
80 c - cb (control bus) */
/* Per-function machine-specific data, created by mep_init_machine_status
   and garbage collected with the function (hence the GTY marker).  */
struct GTY(()) machine_function
{
  int mep_frame_pointer_needed;

  /* For varargs. */
  int arg_regs_to_save;
  int regsave_filler;
  int frame_filler;
  int frame_locked;

  /* Records __builtin_return address. */
  rtx eh_stack_adjust;

  /* Bookkeeping for the register save area; the layout is computed by
     the frame/prologue code (not visible in this chunk).  */
  int reg_save_size;
  int reg_save_slot[FIRST_PSEUDO_REGISTER];
  unsigned char reg_saved[FIRST_PSEUDO_REGISTER];

  /* 2 if the current function has an interrupt attribute, 1 if not, 0
     if unknown.  This is here because resource.c uses EPILOGUE_USES
     which needs it.  */
  int interrupt_handler;

  /* Likewise, for disinterrupt attribute. */
  int disable_interrupts;

  /* Number of doloop tags used so far. */
  int doloop_tags;

  /* True if the last tag was allocated to a doloop_end. */
  bool doloop_tag_from_end;

  /* True if reload changes $TP. */
  bool reload_changes_tp;

  /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
     We only set this if the function is an interrupt handler. */
  int asms_without_operands;
};
121 #define MEP_CONTROL_REG(x) \
122 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
124 static const struct attribute_spec mep_attribute_table[11];
126 static GTY(()) section * based_section;
127 static GTY(()) section * tinybss_section;
128 static GTY(()) section * far_section;
129 static GTY(()) section * farbss_section;
130 static GTY(()) section * frodata_section;
131 static GTY(()) section * srodata_section;
133 static GTY(()) section * vtext_section;
134 static GTY(()) section * vftext_section;
135 static GTY(()) section * ftext_section;
137 static void mep_set_leaf_registers (int);
138 static bool symbol_p (rtx);
139 static bool symbolref_p (rtx);
140 static void encode_pattern_1 (rtx);
141 static void encode_pattern (rtx);
142 static bool const_in_range (rtx, int, int);
143 static void mep_rewrite_mult (rtx, rtx);
144 static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
145 static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
146 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
147 static bool move_needs_splitting (rtx, rtx, enum machine_mode);
148 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
149 static bool mep_nongeneral_reg (rtx);
150 static bool mep_general_copro_reg (rtx);
151 static bool mep_nonregister (rtx);
152 static struct machine_function* mep_init_machine_status (void);
153 static rtx mep_tp_rtx (void);
154 static rtx mep_gp_rtx (void);
155 static bool mep_interrupt_p (void);
156 static bool mep_disinterrupt_p (void);
157 static bool mep_reg_set_p (rtx, rtx);
158 static bool mep_reg_set_in_function (int);
159 static bool mep_interrupt_saved_reg (int);
160 static bool mep_call_saves_register (int);
161 static rtx F (rtx);
162 static void add_constant (int, int, int, int);
163 static bool mep_function_uses_sp (void);
164 static rtx maybe_dead_move (rtx, rtx, bool);
165 static void mep_reload_pointer (int, const char *);
166 static void mep_start_function (FILE *, HOST_WIDE_INT);
167 static bool mep_function_ok_for_sibcall (tree, tree);
168 static int unique_bit_in (HOST_WIDE_INT);
169 static int bit_size_for_clip (HOST_WIDE_INT);
170 static int bytesize (const_tree, enum machine_mode);
171 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
172 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
173 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
174 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
175 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
176 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
177 static bool mep_function_attribute_inlinable_p (const_tree);
178 static bool mep_can_inline_p (tree, tree);
179 static bool mep_lookup_pragma_disinterrupt (const char *);
180 static int mep_multiple_address_regions (tree, bool);
181 static int mep_attrlist_to_encoding (tree, tree);
182 static void mep_insert_attributes (tree, tree *);
183 static void mep_encode_section_info (tree, rtx, int);
184 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
185 static void mep_unique_section (tree, int);
186 static unsigned int mep_section_type_flags (tree, const char *, int);
187 static void mep_asm_named_section (const char *, unsigned int, tree);
188 static bool mep_mentioned_p (rtx, rtx, int);
189 static void mep_reorg_regmove (rtx);
190 static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
191 static void mep_reorg_repeat (rtx);
192 static bool mep_invertable_branch_p (rtx);
193 static void mep_invert_branch (rtx, rtx);
194 static void mep_reorg_erepeat (rtx);
195 static void mep_jmp_return_reorg (rtx);
196 static void mep_reorg_addcombine (rtx);
197 static void mep_reorg (void);
198 static void mep_init_intrinsics (void);
199 static void mep_init_builtins (void);
200 static void mep_intrinsic_unavailable (int);
201 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
202 static bool mep_get_move_insn (int, const struct cgen_insn **);
203 static rtx mep_convert_arg (enum machine_mode, rtx);
204 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
205 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
206 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
207 static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
208 static int mep_adjust_cost (rtx, rtx, rtx, int);
209 static int mep_issue_rate (void);
210 static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
211 static void mep_move_ready_insn (rtx *, int, rtx);
212 static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
213 static rtx mep_make_bundle (rtx, rtx);
214 static void mep_bundle_insns (rtx);
215 static bool mep_rtx_cost (rtx, int, int, int *, bool);
216 static int mep_address_cost (rtx, bool);
217 static void mep_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
218 tree, int *, int);
219 static bool mep_pass_by_reference (CUMULATIVE_ARGS * cum, enum machine_mode,
220 const_tree, bool);
221 static bool mep_vector_mode_supported_p (enum machine_mode);
222 static bool mep_handle_option (size_t, const char *, int);
223 static rtx mep_allocate_initial_value (rtx);
224 static void mep_asm_init_sections (void);
225 static int mep_comp_type_attributes (const_tree, const_tree);
226 static bool mep_narrow_volatile_bitfield (void);
227 static rtx mep_expand_builtin_saveregs (void);
228 static tree mep_build_builtin_va_list (void);
229 static void mep_expand_va_start (tree, rtx);
230 static tree mep_gimplify_va_arg_expr (tree, tree, tree *, tree *);
231 static bool mep_can_eliminate (const int, const int);
232 static void mep_trampoline_init (rtx, tree, rtx);
234 /* Initialize the GCC target structure. */
236 #undef TARGET_ASM_FUNCTION_PROLOGUE
237 #define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
238 #undef TARGET_ATTRIBUTE_TABLE
239 #define TARGET_ATTRIBUTE_TABLE mep_attribute_table
240 #undef TARGET_COMP_TYPE_ATTRIBUTES
241 #define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
242 #undef TARGET_INSERT_ATTRIBUTES
243 #define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
244 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
245 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
246 #undef TARGET_CAN_INLINE_P
247 #define TARGET_CAN_INLINE_P mep_can_inline_p
248 #undef TARGET_SECTION_TYPE_FLAGS
249 #define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
250 #undef TARGET_ASM_NAMED_SECTION
251 #define TARGET_ASM_NAMED_SECTION mep_asm_named_section
252 #undef TARGET_INIT_BUILTINS
253 #define TARGET_INIT_BUILTINS mep_init_builtins
254 #undef TARGET_EXPAND_BUILTIN
255 #define TARGET_EXPAND_BUILTIN mep_expand_builtin
256 #undef TARGET_SCHED_ADJUST_COST
257 #define TARGET_SCHED_ADJUST_COST mep_adjust_cost
258 #undef TARGET_SCHED_ISSUE_RATE
259 #define TARGET_SCHED_ISSUE_RATE mep_issue_rate
260 #undef TARGET_SCHED_REORDER
261 #define TARGET_SCHED_REORDER mep_sched_reorder
262 #undef TARGET_STRIP_NAME_ENCODING
263 #define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
264 #undef TARGET_ASM_SELECT_SECTION
265 #define TARGET_ASM_SELECT_SECTION mep_select_section
266 #undef TARGET_ASM_UNIQUE_SECTION
267 #define TARGET_ASM_UNIQUE_SECTION mep_unique_section
268 #undef TARGET_ENCODE_SECTION_INFO
269 #define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
270 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
271 #define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
272 #undef TARGET_RTX_COSTS
273 #define TARGET_RTX_COSTS mep_rtx_cost
274 #undef TARGET_ADDRESS_COST
275 #define TARGET_ADDRESS_COST mep_address_cost
276 #undef TARGET_MACHINE_DEPENDENT_REORG
277 #define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
278 #undef TARGET_SETUP_INCOMING_VARARGS
279 #define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
280 #undef TARGET_PASS_BY_REFERENCE
281 #define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
282 #undef TARGET_VECTOR_MODE_SUPPORTED_P
283 #define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
284 #undef TARGET_HANDLE_OPTION
285 #define TARGET_HANDLE_OPTION mep_handle_option
286 #undef TARGET_DEFAULT_TARGET_FLAGS
287 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
288 #undef TARGET_ALLOCATE_INITIAL_VALUE
289 #define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
290 #undef TARGET_ASM_INIT_SECTIONS
291 #define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
292 #undef TARGET_RETURN_IN_MEMORY
293 #define TARGET_RETURN_IN_MEMORY mep_return_in_memory
294 #undef TARGET_NARROW_VOLATILE_BITFIELD
295 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
296 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
297 #define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
298 #undef TARGET_BUILD_BUILTIN_VA_LIST
299 #define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
300 #undef TARGET_EXPAND_BUILTIN_VA_START
301 #define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
302 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
303 #define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
304 #undef TARGET_CAN_ELIMINATE
305 #define TARGET_CAN_ELIMINATE mep_can_eliminate
306 #undef TARGET_TRAMPOLINE_INIT
307 #define TARGET_TRAMPOLINE_INIT mep_trampoline_init
309 struct gcc_target targetm = TARGET_INITIALIZER;
311 #define WANT_GCC_DEFINITIONS
312 #include "mep-intrin.h"
313 #undef WANT_GCC_DEFINITIONS
316 /* Command Line Option Support. */
318 char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
320 /* True if we can use cmov instructions to move values back and forth
321 between core and coprocessor registers. */
322 bool mep_have_core_copro_moves_p;
324 /* True if we can use cmov instructions (or a work-alike) to move
325 values between coprocessor registers. */
326 bool mep_have_copro_copro_moves_p;
/* A table of all coprocessor instructions that can act like
   a coprocessor-to-coprocessor cmov.  */
static const int mep_cmov_insns[] = {
  mep_cmov,
  mep_cpmov,
  mep_fmovs,
  mep_caddi3,
  mep_csubi3,
  mep_candi3,
  mep_cori3,
  mep_cxori3,
  mep_cand3,
  mep_cor3
};

/* Nonzero if -mtiny= was given explicitly on the command line;
   checked by mep_override_options when validating -ms/-mm/-ml.  */
static int option_mtiny_specified = 0;
346 static void
347 mep_set_leaf_registers (int enable)
349 int i;
351 if (mep_leaf_registers[0] != enable)
352 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
353 mep_leaf_registers[i] = enable;
/* Adjust register usage for the current option set: when the core has
   neither a multiplier nor a divider (no -mmult, no -mdiv), $hi and
   $lo are taken out of circulation; the shadow registers are always
   treated as global.  */
void
mep_conditional_register_usage (char *fixed_regs, char *call_used_regs)
{
  int i;

  if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
    {
      fixed_regs[HI_REGNO] = 1;
      fixed_regs[LO_REGNO] = 1;
      call_used_regs[HI_REGNO] = 1;
      call_used_regs[LO_REGNO] = 1;
    }

  for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
    global_regs[i] = 1;
}
/* Override default optimization settings for this target.  */
void
mep_optimization_options (void)
{
  /* The first scheduling pass often increases register pressure and tends
     to result in more spill code.  Only run it when specifically asked.  */
  flag_schedule_insns = 0;

  /* Using $fp doesn't gain us much, even when debugging is important.  */
  flag_omit_frame_pointer = 1;
}
/* Validate option combinations and derive target-wide settings once
   all command-line switches have been processed.  Diagnostics are
   emitted in a fixed order, so the checks below must not be
   reordered.  */
void
mep_override_options (void)
{
  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  if (TARGET_S && TARGET_M)
    error ("only one of -ms and -mm may be given");
  if (TARGET_S && TARGET_L)
    error ("only one of -ms and -ml may be given");
  if (TARGET_M && TARGET_L)
    error ("only one of -mm and -ml may be given");
  if (TARGET_S && option_mtiny_specified)
    error ("only one of -ms and -mtiny= may be given");
  if (TARGET_M && option_mtiny_specified)
    error ("only one of -mm and -mtiny= may be given");
  if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
    warning (0, "-mclip currently has no effect without -mminmax");

  if (mep_const_section)
    {
      if (strcmp (mep_const_section, "tiny") != 0
	  && strcmp (mep_const_section, "near") != 0
	  && strcmp (mep_const_section, "far") != 0)
	error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
    }

  /* -ms implies a large tiny-section cutoff; -mm, and -ml without an
     explicit -mtiny=, disable the tiny section entirely.  */
  if (TARGET_S)
    mep_tiny_cutoff = 65536;
  if (TARGET_M)
    mep_tiny_cutoff = 0;
  if (TARGET_L && ! option_mtiny_specified)
    mep_tiny_cutoff = 0;

  if (TARGET_64BIT_CR_REGS)
    flag_split_wide_types = 0;

  init_machine_status = mep_init_machine_status;
  mep_init_intrinsics ();
}
426 /* Pattern Support - constraints, predicates, expanders. */
428 /* MEP has very few instructions that can refer to the span of
429 addresses used by symbols, so it's common to check for them. */
431 static bool
432 symbol_p (rtx x)
434 int c = GET_CODE (x);
436 return (c == CONST_INT
437 || c == CONST
438 || c == SYMBOL_REF);
441 static bool
442 symbolref_p (rtx x)
444 int c;
446 if (GET_CODE (x) != MEM)
447 return false;
449 c = GET_CODE (XEXP (x, 0));
450 return (c == CONST_INT
451 || c == CONST
452 || c == SYMBOL_REF);
455 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
457 #define GEN_REG(R, STRICT) \
458 (GR_REGNO_P (R) \
459 || (!STRICT \
460 && ((R) == ARG_POINTER_REGNUM \
461 || (R) >= FIRST_PSEUDO_REGISTER)))
463 static char pattern[12], *patternp;
464 static GTY(()) rtx patternr[12];
465 #define RTX_IS(x) (strcmp (pattern, x) == 0)
/* Worker for encode_pattern.  Append a character code for X (and,
   recursively, for its operands) to the static `pattern' buffer and
   record each visited rtx in `patternr'.  Codes: r=REG, m=MEM,
   +=PLUS, L=LO_SUM, H=HIGH, s=SYMBOL_REF, l=LABEL_REF, i=integer
   constant, u<N>=UNSPEC N, U=USE, ?=anything else.  The result is
   tested with the RTX_IS macro.  */
static void
encode_pattern_1 (rtx x)
{
  int i;

  /* Reserve one byte for the terminating NUL; on overflow, mark the
     last code with '?' and stop recursing.  */
  if (patternp == pattern + sizeof (pattern) - 2)
    {
      patternp[-1] = '?';
      return;
    }

  patternr[patternp-pattern] = x;

  switch (GET_CODE (x))
    {
    case REG:
      *patternp++ = 'r';
      break;
    case MEM:
      *patternp++ = 'm';
      /* FALLTHRU - a MEM is encoded as 'm' followed by its address.  */
    case CONST:
      encode_pattern_1 (XEXP(x, 0));
      break;
    case PLUS:
      *patternp++ = '+';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case LO_SUM:
      *patternp++ = 'L';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case HIGH:
      *patternp++ = 'H';
      encode_pattern_1 (XEXP(x, 0));
      break;
    case SYMBOL_REF:
      *patternp++ = 's';
      break;
    case LABEL_REF:
      *patternp++ = 'l';
      break;
    case CONST_INT:
    case CONST_DOUBLE:
      *patternp++ = 'i';
      break;
    case UNSPEC:
      /* An unspec is encoded as 'u' plus its (single-digit) unspec
	 number, followed by its operands.  */
      *patternp++ = 'u';
      *patternp++ = '0' + XCINT(x, 1, UNSPEC);
      for (i=0; i<XVECLEN (x, 0); i++)
	encode_pattern_1 (XVECEXP (x, 0, i));
      break;
    case USE:
      *patternp++ = 'U';
      break;
    default:
      *patternp++ = '?';
#if 0
      fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
      debug_rtx (x);
      gcc_unreachable ();
#endif
      break;
    }
}
/* Encode rtx X into the static `pattern' buffer, NUL-terminated, for
   subsequent matching with RTX_IS.  */
static void
encode_pattern (rtx x)
{
  patternp = pattern;
  encode_pattern_1 (x);
  *patternp = 0;
}
/* Return the section-encoding character for symbol X (see the table at
   the top of this file), or 0 if X carries no encoding.  Looks through
   MEM, CONST, UNSPEC and symbol+constant PLUS wrappers to find the
   underlying SYMBOL_REF.  The io encodings 'i'/'I' are mapped to their
   plain near/far equivalents.  */
int
mep_section_tag (rtx x)
{
  const char *name;

  while (1)
    {
      switch (GET_CODE (x))
	{
	case MEM:
	case CONST:
	  x = XEXP (x, 0);
	  break;
	case UNSPEC:
	  x = XVECEXP (x, 0, 0);
	  break;
	case PLUS:
	  /* Only a constant offset can be looked through.  */
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    return 0;
	  x = XEXP (x, 0);
	  break;
	default:
	  goto done;
	}
    }
 done:
  if (GET_CODE (x) != SYMBOL_REF)
    return 0;
  name = XSTR (x, 0);
  /* Encoded names look like "@<char>.<name>".  */
  if (name[0] == '@' && name[2] == '.')
    {
      if (name[1] == 'i' || name[1] == 'I')
	{
	  if (name[1] == 'I')
	    return 'f'; /* far */
	  return 'n'; /* near */
	}
      return name[1];
    }
  return 0;
}
/* Return the smallest register class containing hard register REGNO.  */
enum reg_class
mep_regno_reg_class (int regno)
{
  switch (regno)
    {
    case SP_REGNO:		return SP_REGS;
    case TP_REGNO:		return TP_REGS;
    case GP_REGNO:		return GP_REGS;
    case 0:			return R0_REGS;
    case HI_REGNO:		return HI_REGS;
    case LO_REGNO:		return LO_REGS;
    case ARG_POINTER_REGNUM:	return GENERAL_REGS;
    }

  /* The first eight general registers are addressable with $tp-relative
     instructions; the rest are plain general registers.  */
  if (GR_REGNO_P (regno))
    return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
  if (CONTROL_REGNO_P (regno))
    return CONTROL_REGS;

  if (CR_REGNO_P (regno))
    {
      int i, j;

      /* Search for the register amongst user-defined subclasses of
	 the coprocessor registers.  */
      for (i = USER0_REGS; i <= USER3_REGS; ++i)
	{
	  if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
	    continue;
	  for (j = 0; j < N_REG_CLASSES; ++j)
	    {
	      enum reg_class sub = reg_class_subclasses[i][j];

	      /* No subclass of I contains REGNO, so I is the smallest
		 user class containing it.  */
	      if (sub == LIM_REG_CLASSES)
		return i;
	      /* A subclass also contains REGNO, so I is not the
		 smallest; try the next user class.  */
	      if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
		break;
	    }
	}

      return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
    }

  if (CCR_REGNO_P (regno))
    return CCR_REGS;

  gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
  return NO_REGS;
}
#if 0
/* NOTE(review): this whole region is compiled out; it is retained for
   reference only (presumably superseded elsewhere — confirm before
   deleting).  */
enum reg_class
mep_reg_class_from_constraint (int c, const char *str)
{
  switch (c)
    {
    case 'a':
      return SP_REGS;
    case 'b':
      return TP_REGS;
    case 'c':
      return CONTROL_REGS;
    case 'd':
      return HILO_REGS;
    case 'e':
      {
	switch (str[1])
	  {
	  case 'm':
	    return LOADABLE_CR_REGS;
	  case 'x':
	    return mep_have_copro_copro_moves_p ? CR_REGS : NO_REGS;
	  case 'r':
	    return mep_have_core_copro_moves_p ? CR_REGS : NO_REGS;
	  default:
	    return NO_REGS;
	  }
      }
    case 'h':
      return HI_REGS;
    case 'j':
      return RPC_REGS;
    case 'l':
      return LO_REGS;
    case 't':
      return TPREL_REGS;
    case 'v':
      return GP_REGS;
    case 'x':
      return CR_REGS;
    case 'y':
      return CCR_REGS;
    case 'z':
      return R0_REGS;

    case 'A':
    case 'B':
    case 'C':
    case 'D':
      {
	enum reg_class which = c - 'A' + USER0_REGS;
	return (reg_class_size[which] > 0 ? which : NO_REGS);
      }

    default:
      return NO_REGS;
    }
}

/* Return true if VALUE satisfies constant constraint letter C.  */
bool
mep_const_ok_for_letter_p (HOST_WIDE_INT value, int c)
{
  switch (c)
    {
    case 'I': return value >= -32768 && value < 32768;
    case 'J': return value >= 0 && value < 65536;
    case 'K': return value >= 0 && value < 0x01000000;
    case 'L': return value >= -32 && value < 32;
    case 'M': return value >= 0 && value < 32;
    case 'N': return value >= 0 && value < 16;
    case 'O':
      /* 32-bit values with a clear low 16 bits.  */
      if (value & 0xffff)
	return false;
      return value >= -2147483647-1 && value <= 2147483647;
    default:
      gcc_unreachable ();
    }
}

/* Return true if VALUE satisfies extra constraint letter C.  */
bool
mep_extra_constraint (rtx value, int c)
{
  encode_pattern (value);

  switch (c)
    {
    case 'R':
      /* For near symbols, like what call uses.  */
      if (GET_CODE (value) == REG)
	return 0;
      return mep_call_address_operand (value, GET_MODE (value));

    case 'S':
      /* For signed 8-bit immediates.  */
      return (GET_CODE (value) == CONST_INT
	      && INTVAL (value) >= -128
	      && INTVAL (value) <= 127);

    case 'T':
      /* For tp/gp relative symbol values.  */
      return (RTX_IS ("u3s") || RTX_IS ("u2s")
	      || RTX_IS ("+u3si") || RTX_IS ("+u2si"));

    case 'U':
      /* Non-absolute memories.  */
      return GET_CODE (value) == MEM && ! CONSTANT_P (XEXP (value, 0));

    case 'W':
      /* %hi(sym) */
      return RTX_IS ("Hs");

    case 'Y':
      /* Register indirect.  */
      return RTX_IS ("mr");

    case 'Z':
      return mep_section_tag (value) == 'c' && RTX_IS ("ms");
    }

  return false;
}
#endif
757 #undef PASS
758 #undef FAIL
760 static bool
761 const_in_range (rtx x, int minv, int maxv)
763 return (GET_CODE (x) == CONST_INT
764 && INTVAL (x) >= minv
765 && INTVAL (x) <= maxv);
/* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
   such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2.  If a move
   is needed, emit it before INSN if INSN is nonnull, otherwise emit it
   at the end of the insn stream.  */

rtx
mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
{
  if (rtx_equal_p (dest, src1))
    return src2;
  else if (rtx_equal_p (dest, src2))
    return src1;
  else
    {
      /* DEST matches neither source, so copy SRC1 into DEST first and
	 multiply by SRC2.  */
      if (insn == 0)
	emit_insn (gen_movsi (copy_rtx (dest), src1));
      else
	emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
      return src2;
    }
}
/* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
   Change the last element of PATTERN from (clobber (scratch:SI))
   to (clobber (reg:SI HI_REGNO)).  */

static void
mep_rewrite_mult (rtx insn, rtx pattern)
{
  rtx hi_clobber;

  hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
  XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
  PATTERN (insn) = pattern;
  /* The pattern changed, so force INSN to be re-recognized.  */
  INSN_CODE (insn) = -1;
}
/* Subroutine of mep_reuse_lo_p.  Rewrite instruction INSN so that it
   calculates SRC1 * SRC2 and stores the result in $lo.  Also make it
   store the result in DEST if nonnull.  */

static void
mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
{
  rtx lo, pattern;

  lo = gen_rtx_REG (SImode, LO_REGNO);
  if (dest)
    /* mulr form: result in both DEST and $lo.  */
    pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
			   mep_mulr_source (insn, dest, src1, src2));
  else
    /* mul form: result only in $lo.  */
    pattern = gen_mulsi3_lo (lo, src1, src2);
  mep_rewrite_mult (insn, pattern);
}
/* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3.  First copy
   SRC3 into $lo, then use either madd or maddr.  The move into $lo will
   be deleted by a peephole2 if SRC3 is already in $lo.  */

static void
mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
{
  rtx lo, pattern;

  lo = gen_rtx_REG (SImode, LO_REGNO);
  emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
  if (dest)
    /* maddr form: result in both DEST and $lo.  */
    pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
			    mep_mulr_source (insn, dest, src1, src2),
			    copy_rtx (lo));
  else
    /* madd form: result only in $lo.  */
    pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
  mep_rewrite_mult (insn, pattern);
}
/* Return true if $lo has the same value as integer register GPR when
   instruction INSN is reached.  If necessary, rewrite the instruction
   that sets $lo so that it uses a proper SET, not a CLOBBER.  LO is an
   rtx for (reg:SI LO_REGNO).

   This function is intended to be used by the peephole2 pass.  Since
   that pass goes from the end of a basic block to the beginning, and
   propagates liveness information on the way, there is no need to
   update register notes here.

   If GPR_DEAD_P is true on entry, and this function returns true,
   then the caller will replace _every_ use of GPR in and after INSN
   with LO.  This means that if the instruction that sets $lo is a
   mulr- or maddr-type instruction, we can rewrite it to use mul or
   madd instead.  In combination with the copy progagation pass,
   this allows us to replace sequences like:

	mov GPR,R1
	mulr GPR,R2

   with:

	mul R1,R2

   if GPR is no longer used.  */

static bool
mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
{
  /* Scan backwards from INSN to the start of the basic block, looking
     for the instruction that sets $lo.  */
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	switch (recog_memoized (insn))
	  {
	  case CODE_FOR_mulsi3_1:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		mep_rewrite_mulsi3 (insn,
				    gpr_dead_p ? NULL : recog_data.operand[0],
				    recog_data.operand[1],
				    recog_data.operand[2]);
		return true;
	      }
	    return false;

	  case CODE_FOR_maddsi3:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		mep_rewrite_maddsi3 (insn,
				     gpr_dead_p ? NULL : recog_data.operand[0],
				     recog_data.operand[1],
				     recog_data.operand[2],
				     recog_data.operand[3]);
		return true;
	      }
	    return false;

	  case CODE_FOR_mulsi3r:
	  case CODE_FOR_maddsi3r:
	    extract_insn (insn);
	    /* These forms already SET $lo; operand 1 is the GPR
	       destination (see mep_rewrite_mulsi3 above).  */
	    return rtx_equal_p (recog_data.operand[1], gpr);

	  default:
	    /* Any other insn that changes $lo or GPR, or that has
	       unknown side effects, ends the search.  */
	    if (reg_set_p (lo, insn)
		|| reg_set_p (gpr, insn)
		|| volatile_insn_p (PATTERN (insn)))
	      return false;

	    /* GPR is still used after this insn, so it cannot be
	       treated as dead by the caller's rewrite.  */
	    if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
	      gpr_dead_p = false;
	    break;
	  }
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
/* A wrapper around mep_reuse_lo_p_1 that preserves recog_data.  */

bool
mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
{
  bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
  /* mep_reuse_lo_p_1 may have called extract_insn on other insns;
     re-extract INSN so the caller sees its own operands again.  */
  extract_insn (insn);
  return result;
}
/* Return true if SET can be turned into a post-modify load or store
   that adds OFFSET to GPR.  In other words, return true if SET can be
   changed into:

       (parallel [SET (set GPR (plus:SI GPR OFFSET))]).

   It's OK to change SET to an equivalent operation in order to
   make it match.  */

static bool
mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
{
  rtx *reg, *mem;
  unsigned int reg_bytes, mem_bytes;
  enum machine_mode reg_mode, mem_mode;

  /* Only simple SETs can be converted.  */
  if (GET_CODE (set) != SET)
    return false;

  /* Point REG to what we hope will be the register side of the set and
     MEM to what we hope will be the memory side.  */
  if (GET_CODE (SET_DEST (set)) == MEM)
    {
      mem = &SET_DEST (set);
      reg = &SET_SRC (set);
    }
  else
    {
      reg = &SET_DEST (set);
      mem = &SET_SRC (set);
      /* Look through a sign-extension of the loaded value.  */
      if (GET_CODE (*mem) == SIGN_EXTEND)
	mem = &XEXP (*mem, 0);
    }

  /* Check that *REG is a suitable coprocessor register.  */
  if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
    return false;

  /* Check that *MEM is a suitable memory reference.  */
  if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
    return false;

  /* Get the number of bytes in each operand.  */
  mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
  reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));

  /* Check that OFFSET is suitably aligned.  */
  if (INTVAL (offset) & (mem_bytes - 1))
    return false;

  /* From here on the SET is modified in place, so every test above
     must already have passed.  Convert *MEM to a normal integer
     mode.  */
  mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
  *mem = change_address (*mem, mem_mode, NULL);

  /* Adjust *REG as well.  */
  *reg = shallow_copy_rtx (*reg);
  if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
    {
      /* SET is a subword load.  Convert it to an explicit extension.  */
      PUT_MODE (*reg, SImode);
      *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
    }
  else
    {
      reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
      PUT_MODE (*reg, reg_mode);
    }
  return true;
}
1004 /* Return the effect of frame-related instruction INSN. */
1006 static rtx
1007 mep_frame_expr (rtx insn)
1009 rtx note, expr;
1011 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
1012 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
1013 RTX_FRAME_RELATED_P (expr) = 1;
1014 return expr;
/* Merge instructions INSN1 and INSN2 using a PARALLEL.  Store the
   new pattern in INSN1; INSN2 will be deleted by the caller.  */

static void
mep_make_parallel (rtx insn1, rtx insn2)
{
  rtx expr;

  /* Carry any frame-related information from INSN2 (and from INSN1,
     when both insns are frame related) over to the merged insn.  */
  if (RTX_FRAME_RELATED_P (insn2))
    {
      expr = mep_frame_expr (insn2);
      if (RTX_FRAME_RELATED_P (insn1))
	expr = gen_rtx_SEQUENCE (VOIDmode,
				 gen_rtvec (2, mep_frame_expr (insn1), expr));
      set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
      RTX_FRAME_RELATED_P (insn1) = 1;
    }

  PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
				      gen_rtvec (2, PATTERN (insn1),
						 PATTERN (insn2)));
  /* The pattern changed, so force INSN1 to be re-recognized.  */
  INSN_CODE (insn1) = -1;
}
/* SET_INSN is an instruction that adds OFFSET to REG.  Go back through
   the basic block to see if any previous load or store instruction can
   be persuaded to do SET_INSN as a side-effect.  Return true if so.  */

static bool
mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
{
  rtx insn;

  insn = set_insn;
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	{
	  if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
	    {
	      mep_make_parallel (insn, set_insn);
	      return true;
	    }

	  /* Stop at any other insn that sets or uses REG, or that has
	     unknown side effects.  */
	  if (reg_set_p (reg, insn)
	      || reg_referenced_p (reg, PATTERN (insn))
	      || volatile_insn_p (PATTERN (insn)))
	    return false;
	}
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
/* A wrapper around mep_use_post_modify_p_1 that preserves recog_data.  */

bool
mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
{
  bool result = mep_use_post_modify_p_1 (insn, reg, offset);
  /* Re-extract INSN so the caller's recog_data is intact.  */
  extract_insn (insn);
  return result;
}
1082 bool
1083 mep_allow_clip (rtx ux, rtx lx, int s)
1085 HOST_WIDE_INT u = INTVAL (ux);
1086 HOST_WIDE_INT l = INTVAL (lx);
1087 int i;
1089 if (!TARGET_OPT_CLIP)
1090 return false;
1092 if (s)
1094 for (i = 0; i < 30; i ++)
1095 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
1096 && (l == - ((HOST_WIDE_INT) 1 << i)))
1097 return true;
1099 else
1101 if (l != 0)
1102 return false;
1104 for (i = 0; i < 30; i ++)
1105 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
1106 return true;
1108 return false;
1111 bool
1112 mep_bit_position_p (rtx x, bool looking_for)
1114 if (GET_CODE (x) != CONST_INT)
1115 return false;
1116 switch ((int) INTVAL(x) & 0xff)
1118 case 0x01: case 0x02: case 0x04: case 0x08:
1119 case 0x10: case 0x20: case 0x40: case 0x80:
1120 return looking_for;
1121 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1122 case 0xef: case 0xdf: case 0xbf: case 0x7f:
1123 return !looking_for;
1125 return false;
/* Return true if moving SRC into DEST must be split into multiple
   instructions: true for far-section symbols, for symbol+offset forms
   whose offset is outside [-65536, 0xffffff], and for hard-register
   destinations above $7.  */
static bool
move_needs_splitting (rtx dest, rtx src,
		      enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int s = mep_section_tag (src);

  /* Peel MEM/CONST wrappers until the symbolic core of SRC is found;
     anything non-symbolic never needs splitting.  */
  while (1)
    {
      if (GET_CODE (src) == CONST
	  || GET_CODE (src) == MEM)
	src = XEXP (src, 0);
      else if (GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == PLUS)
	break;
      else
	return false;
    }

  if (s == 'f'
      || (GET_CODE (src) == PLUS
	  && GET_CODE (XEXP (src, 1)) == CONST_INT
	  && (INTVAL (XEXP (src, 1)) < -65536
	      || INTVAL (XEXP (src, 1)) > 0xffffff))
      || (GET_CODE (dest) == REG
	  && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
    return true;
  return false;
}
1157 bool
1158 mep_split_mov (rtx *operands, int symbolic)
1160 if (symbolic)
1162 if (move_needs_splitting (operands[0], operands[1], SImode))
1163 return true;
1164 return false;
1167 if (GET_CODE (operands[1]) != CONST_INT)
1168 return false;
1170 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1171 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1172 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1173 return false;
1175 if (((!reload_completed && !reload_in_progress)
1176 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1177 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1178 return false;
1180 return true;
1183 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1184 it to one specific value. So the insn chosen depends on whether
1185 the source and destination modes match. */
1187 bool
1188 mep_vliw_mode_match (rtx tgt)
1190 bool src_vliw = mep_vliw_function_p (cfun->decl);
1191 bool tgt_vliw = INTVAL (tgt);
1193 return src_vliw == tgt_vliw;
1196 /* Like the above, but also test for near/far mismatches. */
1198 bool
1199 mep_vliw_jmp_match (rtx tgt)
1201 bool src_vliw = mep_vliw_function_p (cfun->decl);
1202 bool tgt_vliw = INTVAL (tgt);
1204 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1205 return false;
1207 return src_vliw == tgt_vliw;
1210 bool
1211 mep_multi_slot (rtx x)
1213 return get_attr_slot (x) == SLOT_MULTI;
1217 bool
1218 mep_legitimate_constant_p (rtx x)
1220 /* We can't convert symbol values to gp- or tp-rel values after
1221 reload, as reload might have used $gp or $tp for other
1222 purposes. */
1223 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1225 char e = mep_section_tag (x);
1226 return (e != 't' && e != 'b');
1228 return 1;
1231 /* Be careful not to use macros that need to be compiled one way for
1232 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
1234 bool
1235 mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
1237 int the_tag;
1239 #define DEBUG_LEGIT 0
1240 #if DEBUG_LEGIT
1241 fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
1242 debug_rtx (x);
1243 #endif
1245 if (GET_CODE (x) == LO_SUM
1246 && GET_CODE (XEXP (x, 0)) == REG
1247 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1248 && CONSTANT_P (XEXP (x, 1)))
1250 if (GET_MODE_SIZE (mode) > 4)
1252 /* We will end up splitting this, and lo_sums are not
1253 offsettable for us. */
1254 #if DEBUG_LEGIT
1255 fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
1256 #endif
1257 return false;
1259 #if DEBUG_LEGIT
1260 fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
1261 #endif
1262 return true;
1265 if (GET_CODE (x) == REG
1266 && GEN_REG (REGNO (x), strict))
1268 #if DEBUG_LEGIT
1269 fprintf (stderr, " - yup, [reg]\n");
1270 #endif
1271 return true;
1274 if (GET_CODE (x) == PLUS
1275 && GET_CODE (XEXP (x, 0)) == REG
1276 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1277 && const_in_range (XEXP (x, 1), -32768, 32767))
1279 #if DEBUG_LEGIT
1280 fprintf (stderr, " - yup, [reg+const]\n");
1281 #endif
1282 return true;
1285 if (GET_CODE (x) == PLUS
1286 && GET_CODE (XEXP (x, 0)) == REG
1287 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1288 && GET_CODE (XEXP (x, 1)) == CONST
1289 && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
1290 || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
1291 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
1292 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
1294 #if DEBUG_LEGIT
1295 fprintf (stderr, " - yup, [reg+unspec]\n");
1296 #endif
1297 return true;
1300 the_tag = mep_section_tag (x);
1302 if (the_tag == 'f')
1304 #if DEBUG_LEGIT
1305 fprintf (stderr, " - nope, [far]\n");
1306 #endif
1307 return false;
1310 if (mode == VOIDmode
1311 && GET_CODE (x) == SYMBOL_REF)
1313 #if DEBUG_LEGIT
1314 fprintf (stderr, " - yup, call [symbol]\n");
1315 #endif
1316 return true;
1319 if ((mode == SImode || mode == SFmode)
1320 && CONSTANT_P (x)
1321 && LEGITIMATE_CONSTANT_P (x)
1322 && the_tag != 't' && the_tag != 'b')
1324 if (GET_CODE (x) != CONST_INT
1325 || (INTVAL (x) <= 0xfffff
1326 && INTVAL (x) >= 0
1327 && (INTVAL (x) % 4) == 0))
1329 #if DEBUG_LEGIT
1330 fprintf (stderr, " - yup, [const]\n");
1331 #endif
1332 return true;
1336 #if DEBUG_LEGIT
1337 fprintf (stderr, " - nope.\n");
1338 #endif
1339 return false;
1343 mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
1344 enum reload_type type,
1345 int ind_levels ATTRIBUTE_UNUSED)
1347 if (GET_CODE (*x) == PLUS
1348 && GET_CODE (XEXP (*x, 0)) == MEM
1349 && GET_CODE (XEXP (*x, 1)) == REG)
1351 /* GCC will by default copy the MEM into a REG, which results in
1352 an invalid address. For us, the best thing to do is move the
1353 whole expression to a REG. */
1354 push_reload (*x, NULL_RTX, x, NULL,
1355 GENERAL_REGS, mode, VOIDmode,
1356 0, 0, opnum, type);
1357 return 1;
1360 if (GET_CODE (*x) == PLUS
1361 && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
1362 && GET_CODE (XEXP (*x, 1)) == CONST_INT)
1364 char e = mep_section_tag (XEXP (*x, 0));
1366 if (e != 't' && e != 'b')
1368 /* GCC thinks that (sym+const) is a valid address. Well,
1369 sometimes it is, this time it isn't. The best thing to
1370 do is reload the symbol to a register, since reg+int
1371 tends to work, and we can't just add the symbol and
1372 constant anyway. */
1373 push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
1374 GENERAL_REGS, mode, VOIDmode,
1375 0, 0, opnum, type);
1376 return 1;
1379 return 0;
1383 mep_core_address_length (rtx insn, int opn)
1385 rtx set = single_set (insn);
1386 rtx mem = XEXP (set, opn);
1387 rtx other = XEXP (set, 1-opn);
1388 rtx addr = XEXP (mem, 0);
1390 if (register_operand (addr, Pmode))
1391 return 2;
1392 if (GET_CODE (addr) == PLUS)
1394 rtx addend = XEXP (addr, 1);
1396 gcc_assert (REG_P (XEXP (addr, 0)));
1398 switch (REGNO (XEXP (addr, 0)))
1400 case STACK_POINTER_REGNUM:
1401 if (GET_MODE_SIZE (GET_MODE (mem)) == 4
1402 && mep_imm7a4_operand (addend, VOIDmode))
1403 return 2;
1404 break;
1406 case 13: /* TP */
1407 gcc_assert (REG_P (other));
1409 if (REGNO (other) >= 8)
1410 break;
1412 if (GET_CODE (addend) == CONST
1413 && GET_CODE (XEXP (addend, 0)) == UNSPEC
1414 && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
1415 return 2;
1417 if (GET_CODE (addend) == CONST_INT
1418 && INTVAL (addend) >= 0
1419 && INTVAL (addend) <= 127
1420 && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
1421 return 2;
1422 break;
1426 return 4;
1430 mep_cop_address_length (rtx insn, int opn)
1432 rtx set = single_set (insn);
1433 rtx mem = XEXP (set, opn);
1434 rtx addr = XEXP (mem, 0);
1436 if (GET_CODE (mem) != MEM)
1437 return 2;
1438 if (register_operand (addr, Pmode))
1439 return 2;
1440 if (GET_CODE (addr) == POST_INC)
1441 return 2;
1443 return 4;
1446 #define DEBUG_EXPAND_MOV 0
1447 bool
1448 mep_expand_mov (rtx *operands, enum machine_mode mode)
1450 int i, t;
1451 int tag[2];
1452 rtx tpsym, tpoffs;
1453 int post_reload = 0;
1455 tag[0] = mep_section_tag (operands[0]);
1456 tag[1] = mep_section_tag (operands[1]);
1458 if (!reload_in_progress
1459 && !reload_completed
1460 && GET_CODE (operands[0]) != REG
1461 && GET_CODE (operands[0]) != SUBREG
1462 && GET_CODE (operands[1]) != REG
1463 && GET_CODE (operands[1]) != SUBREG)
1464 operands[1] = copy_to_mode_reg (mode, operands[1]);
1466 #if DEBUG_EXPAND_MOV
1467 fprintf(stderr, "expand move %s %d\n", mode_name[mode],
1468 reload_in_progress || reload_completed);
1469 debug_rtx (operands[0]);
1470 debug_rtx (operands[1]);
1471 #endif
1473 if (mode == DImode || mode == DFmode)
1474 return false;
1476 if (reload_in_progress || reload_completed)
1478 rtx r;
1480 if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
1481 cfun->machine->reload_changes_tp = true;
1483 if (tag[0] == 't' || tag[1] == 't')
1485 r = has_hard_reg_initial_val (Pmode, GP_REGNO);
1486 if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
1487 post_reload = 1;
1489 if (tag[0] == 'b' || tag[1] == 'b')
1491 r = has_hard_reg_initial_val (Pmode, TP_REGNO);
1492 if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
1493 post_reload = 1;
1495 if (cfun->machine->reload_changes_tp == true)
1496 post_reload = 1;
1499 if (!post_reload)
1501 rtx n;
1502 if (symbol_p (operands[1]))
1504 t = mep_section_tag (operands[1]);
1505 if (t == 'b' || t == 't')
1508 if (GET_CODE (operands[1]) == SYMBOL_REF)
1510 tpsym = operands[1];
1511 n = gen_rtx_UNSPEC (mode,
1512 gen_rtvec (1, operands[1]),
1513 t == 'b' ? UNS_TPREL : UNS_GPREL);
1514 n = gen_rtx_CONST (mode, n);
1516 else if (GET_CODE (operands[1]) == CONST
1517 && GET_CODE (XEXP (operands[1], 0)) == PLUS
1518 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
1519 && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
1521 tpsym = XEXP (XEXP (operands[1], 0), 0);
1522 tpoffs = XEXP (XEXP (operands[1], 0), 1);
1523 n = gen_rtx_UNSPEC (mode,
1524 gen_rtvec (1, tpsym),
1525 t == 'b' ? UNS_TPREL : UNS_GPREL);
1526 n = gen_rtx_PLUS (mode, n, tpoffs);
1527 n = gen_rtx_CONST (mode, n);
1529 else if (GET_CODE (operands[1]) == CONST
1530 && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
1531 return false;
1532 else
1534 error ("unusual TP-relative address");
1535 return false;
1538 n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
1539 : mep_gp_rtx ()), n);
1540 n = emit_insn (gen_rtx_SET (mode, operands[0], n));
1541 #if DEBUG_EXPAND_MOV
1542 fprintf(stderr, "mep_expand_mov emitting ");
1543 debug_rtx(n);
1544 #endif
1545 return true;
1549 for (i=0; i < 2; i++)
1551 t = mep_section_tag (operands[i]);
1552 if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
1554 rtx sym, n, r;
1555 int u;
1557 sym = XEXP (operands[i], 0);
1558 if (GET_CODE (sym) == CONST
1559 && GET_CODE (XEXP (sym, 0)) == UNSPEC)
1560 sym = XVECEXP (XEXP (sym, 0), 0, 0);
1562 if (t == 'b')
1564 r = mep_tp_rtx ();
1565 u = UNS_TPREL;
1567 else
1569 r = mep_gp_rtx ();
1570 u = UNS_GPREL;
1573 n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
1574 n = gen_rtx_CONST (Pmode, n);
1575 n = gen_rtx_PLUS (Pmode, r, n);
1576 operands[i] = replace_equiv_address (operands[i], n);
1581 if ((GET_CODE (operands[1]) != REG
1582 && MEP_CONTROL_REG (operands[0]))
1583 || (GET_CODE (operands[0]) != REG
1584 && MEP_CONTROL_REG (operands[1])))
1586 rtx temp;
1587 #if DEBUG_EXPAND_MOV
1588 fprintf (stderr, "cr-mem, forcing op1 to reg\n");
1589 #endif
1590 temp = gen_reg_rtx (mode);
1591 emit_move_insn (temp, operands[1]);
1592 operands[1] = temp;
1595 if (symbolref_p (operands[0])
1596 && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
1597 || (GET_MODE_SIZE (mode) != 4)))
1599 rtx temp;
1601 gcc_assert (!reload_in_progress && !reload_completed);
1603 temp = force_reg (Pmode, XEXP (operands[0], 0));
1604 operands[0] = replace_equiv_address (operands[0], temp);
1605 emit_move_insn (operands[0], operands[1]);
1606 return true;
1609 if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
1610 tag[1] = 0;
1612 if (symbol_p (operands[1])
1613 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1615 emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
1616 emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
1617 return true;
1620 if (symbolref_p (operands[1])
1621 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1623 rtx temp;
1625 if (reload_in_progress || reload_completed)
1626 temp = operands[0];
1627 else
1628 temp = gen_reg_rtx (Pmode);
1630 emit_insn (gen_movsi_topsym_s (temp, operands[1]));
1631 emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
1632 emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
1633 return true;
1636 return false;
1639 /* Cases where the pattern can't be made to use at all. */
1641 bool
1642 mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1644 int i;
1646 #define DEBUG_MOV_OK 0
1647 #if DEBUG_MOV_OK
1648 fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
1649 mep_section_tag (operands[1]));
1650 debug_rtx (operands[0]);
1651 debug_rtx (operands[1]);
1652 #endif
1654 /* We want the movh patterns to get these. */
1655 if (GET_CODE (operands[1]) == HIGH)
1656 return false;
1658 /* We can't store a register to a far variable without using a
1659 scratch register to hold the address. Using far variables should
1660 be split by mep_emit_mov anyway. */
1661 if (mep_section_tag (operands[0]) == 'f'
1662 || mep_section_tag (operands[1]) == 'f')
1664 #if DEBUG_MOV_OK
1665 fprintf (stderr, " - no, f\n");
1666 #endif
1667 return false;
1669 i = mep_section_tag (operands[1]);
1670 if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
1671 /* These are supposed to be generated with adds of the appropriate
1672 register. During and after reload, however, we allow them to
1673 be accessed as normal symbols because adding a dependency on
1674 the base register now might cause problems. */
1676 #if DEBUG_MOV_OK
1677 fprintf (stderr, " - no, bt\n");
1678 #endif
1679 return false;
1682 /* The only moves we can allow involve at least one general
1683 register, so require it. */
1684 for (i = 0; i < 2; i ++)
1686 /* Allow subregs too, before reload. */
1687 rtx x = operands[i];
1689 if (GET_CODE (x) == SUBREG)
1690 x = XEXP (x, 0);
1691 if (GET_CODE (x) == REG
1692 && ! MEP_CONTROL_REG (x))
1694 #if DEBUG_MOV_OK
1695 fprintf (stderr, " - ok\n");
1696 #endif
1697 return true;
1700 #if DEBUG_MOV_OK
1701 fprintf (stderr, " - no, no gen reg\n");
1702 #endif
1703 return false;
1706 #define DEBUG_SPLIT_WIDE_MOVE 0
1707 void
1708 mep_split_wide_move (rtx *operands, enum machine_mode mode)
1710 int i;
1712 #if DEBUG_SPLIT_WIDE_MOVE
1713 fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
1714 debug_rtx (operands[0]);
1715 debug_rtx (operands[1]);
1716 #endif
1718 for (i = 0; i <= 1; i++)
1720 rtx op = operands[i], hi, lo;
1722 switch (GET_CODE (op))
1724 case REG:
1726 unsigned int regno = REGNO (op);
1728 if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
1730 rtx i32;
1732 lo = gen_rtx_REG (SImode, regno);
1733 i32 = GEN_INT (32);
1734 hi = gen_rtx_ZERO_EXTRACT (SImode,
1735 gen_rtx_REG (DImode, regno),
1736 i32, i32);
1738 else
1740 hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
1741 lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
1744 break;
1746 case CONST_INT:
1747 case CONST_DOUBLE:
1748 case MEM:
1749 hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
1750 lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
1751 break;
1753 default:
1754 gcc_unreachable ();
1757 /* The high part of CR <- GPR moves must be done after the low part. */
1758 operands [i + 4] = lo;
1759 operands [i + 2] = hi;
1762 if (reg_mentioned_p (operands[2], operands[5])
1763 || GET_CODE (operands[2]) == ZERO_EXTRACT
1764 || GET_CODE (operands[4]) == ZERO_EXTRACT)
1766 rtx tmp;
1768 /* Overlapping register pairs -- make sure we don't
1769 early-clobber ourselves. */
1770 tmp = operands[2];
1771 operands[2] = operands[4];
1772 operands[4] = tmp;
1773 tmp = operands[3];
1774 operands[3] = operands[5];
1775 operands[5] = tmp;
1778 #if DEBUG_SPLIT_WIDE_MOVE
1779 fprintf(stderr, "\033[34m");
1780 debug_rtx (operands[2]);
1781 debug_rtx (operands[3]);
1782 debug_rtx (operands[4]);
1783 debug_rtx (operands[5]);
1784 fprintf(stderr, "\033[0m");
1785 #endif
1788 /* Emit a setcc instruction in its entirity. */
1790 static bool
1791 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1793 rtx tmp;
1795 switch (code)
1797 case GT:
1798 case GTU:
1799 tmp = op1, op1 = op2, op2 = tmp;
1800 code = swap_condition (code);
1801 /* FALLTHRU */
1803 case LT:
1804 case LTU:
1805 op1 = force_reg (SImode, op1);
1806 emit_insn (gen_rtx_SET (VOIDmode, dest,
1807 gen_rtx_fmt_ee (code, SImode, op1, op2)));
1808 return true;
1810 case EQ:
1811 if (op2 != const0_rtx)
1812 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1813 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1814 return true;
1816 case NE:
1817 /* Branchful sequence:
1818 mov dest, 0 16-bit
1819 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1820 mov dest, 1 16-bit
1822 Branchless sequence:
1823 add3 tmp, op1, -op2 32-bit (or mov + sub)
1824 sltu3 tmp, tmp, 1 16-bit
1825 xor3 dest, tmp, 1 32-bit
1827 if (optimize_size && op2 != const0_rtx)
1828 return false;
1830 if (op2 != const0_rtx)
1831 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1833 op2 = gen_reg_rtx (SImode);
1834 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1836 emit_insn (gen_rtx_SET (VOIDmode, dest,
1837 gen_rtx_XOR (SImode, op2, const1_rtx)));
1838 return true;
1840 case LE:
1841 if (GET_CODE (op2) != CONST_INT
1842 || INTVAL (op2) == 0x7ffffff)
1843 return false;
1844 op2 = GEN_INT (INTVAL (op2) + 1);
1845 return mep_expand_setcc_1 (LT, dest, op1, op2);
1847 case LEU:
1848 if (GET_CODE (op2) != CONST_INT
1849 || INTVAL (op2) == -1)
1850 return false;
1851 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1852 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1854 case GE:
1855 if (GET_CODE (op2) != CONST_INT
1856 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1857 return false;
1858 op2 = GEN_INT (INTVAL (op2) - 1);
1859 return mep_expand_setcc_1 (GT, dest, op1, op2);
1861 case GEU:
1862 if (GET_CODE (op2) != CONST_INT
1863 || op2 == const0_rtx)
1864 return false;
1865 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1866 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1868 default:
1869 gcc_unreachable ();
1873 bool
1874 mep_expand_setcc (rtx *operands)
1876 rtx dest = operands[0];
1877 enum rtx_code code = GET_CODE (operands[1]);
1878 rtx op0 = operands[2];
1879 rtx op1 = operands[3];
1881 return mep_expand_setcc_1 (code, dest, op0, op1);
1885 mep_expand_cbranch (rtx *operands)
1887 enum rtx_code code = GET_CODE (operands[0]);
1888 rtx op0 = operands[1];
1889 rtx op1 = operands[2];
1890 rtx tmp;
1892 restart:
1893 switch (code)
1895 case LT:
1896 if (mep_imm4_operand (op1, SImode))
1897 break;
1899 tmp = gen_reg_rtx (SImode);
1900 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1901 code = NE;
1902 op0 = tmp;
1903 op1 = const0_rtx;
1904 break;
1906 case GE:
1907 if (mep_imm4_operand (op1, SImode))
1908 break;
1910 tmp = gen_reg_rtx (SImode);
1911 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1913 code = EQ;
1914 op0 = tmp;
1915 op1 = const0_rtx;
1916 break;
1918 case EQ:
1919 case NE:
1920 if (! mep_reg_or_imm4_operand (op1, SImode))
1921 op1 = force_reg (SImode, op1);
1922 break;
1924 case LE:
1925 case GT:
1926 if (GET_CODE (op1) == CONST_INT
1927 && INTVAL (op1) != 0x7fffffff)
1929 op1 = GEN_INT (INTVAL (op1) + 1);
1930 code = (code == LE ? LT : GE);
1931 goto restart;
1934 tmp = gen_reg_rtx (SImode);
1935 gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));
1937 code = (code == LE ? EQ : NE);
1938 op0 = tmp;
1939 op1 = const0_rtx;
1940 break;
1942 case LTU:
1943 if (op1 == const1_rtx)
1945 code = EQ;
1946 op1 = const0_rtx;
1947 break;
1950 tmp = gen_reg_rtx (SImode);
1951 gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
1952 code = NE;
1953 op0 = tmp;
1954 op1 = const0_rtx;
1955 break;
1957 case LEU:
1958 tmp = gen_reg_rtx (SImode);
1959 if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
1960 code = NE;
1961 else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
1962 code = EQ;
1963 else
1964 gcc_unreachable ();
1965 op0 = tmp;
1966 op1 = const0_rtx;
1967 break;
1969 case GTU:
1970 tmp = gen_reg_rtx (SImode);
1971 gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
1972 || mep_expand_setcc_1 (LTU, tmp, op1, op0));
1973 code = NE;
1974 op0 = tmp;
1975 op1 = const0_rtx;
1976 break;
1978 case GEU:
1979 tmp = gen_reg_rtx (SImode);
1980 if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
1981 code = NE;
1982 else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
1983 code = EQ;
1984 else
1985 gcc_unreachable ();
1986 op0 = tmp;
1987 op1 = const0_rtx;
1988 break;
1990 default:
1991 gcc_unreachable ();
1994 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
1997 const char *
1998 mep_emit_cbranch (rtx *operands, int ne)
2000 if (GET_CODE (operands[1]) == REG)
2001 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
2002 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
2003 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
2004 else
2005 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
2008 void
2009 mep_expand_call (rtx *operands, int returns_value)
2011 rtx addr = operands[returns_value];
2012 rtx tp = mep_tp_rtx ();
2013 rtx gp = mep_gp_rtx ();
2015 gcc_assert (GET_CODE (addr) == MEM);
2017 addr = XEXP (addr, 0);
2019 if (! mep_call_address_operand (addr, VOIDmode))
2020 addr = force_reg (SImode, addr);
2022 if (! operands[returns_value+2])
2023 operands[returns_value+2] = const0_rtx;
2025 if (returns_value)
2026 emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
2027 operands[3], tp, gp));
2028 else
2029 emit_call_insn (gen_call_internal (addr, operands[1],
2030 operands[2], tp, gp));
2033 /* Aliasing Support. */
2035 /* If X is a machine specific address (i.e. a symbol or label being
2036 referenced as a displacement from the GOT implemented using an
2037 UNSPEC), then return the base term. Otherwise return X. */
2040 mep_find_base_term (rtx x)
2042 rtx base, term;
2043 int unspec;
2045 if (GET_CODE (x) != PLUS)
2046 return x;
2047 base = XEXP (x, 0);
2048 term = XEXP (x, 1);
2050 if (has_hard_reg_initial_val(Pmode, TP_REGNO)
2051 && base == mep_tp_rtx ())
2052 unspec = UNS_TPREL;
2053 else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
2054 && base == mep_gp_rtx ())
2055 unspec = UNS_GPREL;
2056 else
2057 return x;
2059 if (GET_CODE (term) != CONST)
2060 return x;
2061 term = XEXP (term, 0);
2063 if (GET_CODE (term) != UNSPEC
2064 || XINT (term, 1) != unspec)
2065 return x;
2067 return XVECEXP (term, 0, 0);
2070 /* Reload Support. */
2072 /* Return true if the registers in CLASS cannot represent the change from
2073 modes FROM to TO. */
2075 bool
2076 mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
2077 enum reg_class regclass)
2079 if (from == to)
2080 return false;
2082 /* 64-bit COP regs must remain 64-bit COP regs. */
2083 if (TARGET_64BIT_CR_REGS
2084 && (regclass == CR_REGS
2085 || regclass == LOADABLE_CR_REGS)
2086 && (GET_MODE_SIZE (to) < 8
2087 || GET_MODE_SIZE (from) < 8))
2088 return true;
2090 return false;
2093 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
2095 static bool
2096 mep_general_reg (rtx x)
2098 while (GET_CODE (x) == SUBREG)
2099 x = XEXP (x, 0);
2100 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
2103 static bool
2104 mep_nongeneral_reg (rtx x)
2106 while (GET_CODE (x) == SUBREG)
2107 x = XEXP (x, 0);
2108 return (GET_CODE (x) == REG
2109 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2112 static bool
2113 mep_general_copro_reg (rtx x)
2115 while (GET_CODE (x) == SUBREG)
2116 x = XEXP (x, 0);
2117 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
2120 static bool
2121 mep_nonregister (rtx x)
2123 while (GET_CODE (x) == SUBREG)
2124 x = XEXP (x, 0);
2125 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2128 #define DEBUG_RELOAD 0
2130 /* Return the secondary reload class needed for moving value X to or
2131 from a register in coprocessor register class CLASS. */
2133 static enum reg_class
2134 mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
2136 if (mep_general_reg (x))
2137 /* We can do the move directly if mep_have_core_copro_moves_p,
2138 otherwise we need to go through memory. Either way, no secondary
2139 register is needed. */
2140 return NO_REGS;
2142 if (mep_general_copro_reg (x))
2144 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2145 if (mep_have_copro_copro_moves_p)
2146 return NO_REGS;
2148 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2149 if (mep_have_core_copro_moves_p)
2150 return GENERAL_REGS;
2152 /* Otherwise we need to do it through memory. No secondary
2153 register is needed. */
2154 return NO_REGS;
2157 if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
2158 && constraint_satisfied_p (x, CONSTRAINT_U))
2159 /* X is a memory value that we can access directly. */
2160 return NO_REGS;
2162 /* We have to move X into a GPR first and then copy it to
2163 the coprocessor register. The move from the GPR to the
2164 coprocessor might be done directly or through memory,
2165 depending on mep_have_core_copro_moves_p. */
2166 return GENERAL_REGS;
2169 /* Copying X to register in RCLASS. */
2172 mep_secondary_input_reload_class (enum reg_class rclass,
2173 enum machine_mode mode ATTRIBUTE_UNUSED,
2174 rtx x)
2176 int rv = NO_REGS;
2178 #if DEBUG_RELOAD
2179 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2180 debug_rtx (x);
2181 #endif
2183 if (reg_class_subset_p (rclass, CR_REGS))
2184 rv = mep_secondary_copro_reload_class (rclass, x);
2185 else if (MEP_NONGENERAL_CLASS (rclass)
2186 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2187 rv = GENERAL_REGS;
2189 #if DEBUG_RELOAD
2190 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2191 #endif
2192 return rv;
2195 /* Copying register in RCLASS to X. */
2198 mep_secondary_output_reload_class (enum reg_class rclass,
2199 enum machine_mode mode ATTRIBUTE_UNUSED,
2200 rtx x)
2202 int rv = NO_REGS;
2204 #if DEBUG_RELOAD
2205 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2206 debug_rtx (x);
2207 #endif
2209 if (reg_class_subset_p (rclass, CR_REGS))
2210 rv = mep_secondary_copro_reload_class (rclass, x);
2211 else if (MEP_NONGENERAL_CLASS (rclass)
2212 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2213 rv = GENERAL_REGS;
2215 #if DEBUG_RELOAD
2216 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2217 #endif
2219 return rv;
2222 /* Implement SECONDARY_MEMORY_NEEDED. */
2224 bool
2225 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2226 enum machine_mode mode ATTRIBUTE_UNUSED)
2228 if (!mep_have_core_copro_moves_p)
2230 if (reg_classes_intersect_p (rclass1, CR_REGS)
2231 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2232 return true;
2233 if (reg_classes_intersect_p (rclass2, CR_REGS)
2234 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2235 return true;
2236 if (!mep_have_copro_copro_moves_p
2237 && reg_classes_intersect_p (rclass1, CR_REGS)
2238 && reg_classes_intersect_p (rclass2, CR_REGS))
2239 return true;
2241 return false;
2244 void
2245 mep_expand_reload (rtx *operands, enum machine_mode mode)
2247 /* There are three cases for each direction:
2248 register, farsym
2249 control, farsym
2250 control, nearsym */
2252 int s0 = mep_section_tag (operands[0]) == 'f';
2253 int s1 = mep_section_tag (operands[1]) == 'f';
2254 int c0 = mep_nongeneral_reg (operands[0]);
2255 int c1 = mep_nongeneral_reg (operands[1]);
2256 int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);
2258 #if DEBUG_RELOAD
2259 fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
2260 debug_rtx (operands[0]);
2261 debug_rtx (operands[1]);
2262 #endif
2264 switch (which)
2266 case 00: /* Don't know why this gets here. */
2267 case 02: /* general = far */
2268 emit_move_insn (operands[0], operands[1]);
2269 return;
2271 case 10: /* cr = mem */
2272 case 11: /* cr = cr */
2273 case 01: /* mem = cr */
2274 case 12: /* cr = far */
2275 emit_move_insn (operands[2], operands[1]);
2276 emit_move_insn (operands[0], operands[2]);
2277 return;
2279 case 20: /* far = general */
2280 emit_move_insn (operands[2], XEXP (operands[1], 0));
2281 emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
2282 return;
2284 case 21: /* far = cr */
2285 case 22: /* far = far */
2286 default:
2287 fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
2288 which, mode_name[mode]);
2289 debug_rtx (operands[0]);
2290 debug_rtx (operands[1]);
2291 gcc_unreachable ();
2295 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2296 can be moved directly into registers 0 to 7, but not into the rest.
2297 If so, and if the required class includes registers 0 to 7, restrict
2298 it to those registers. */
2300 enum reg_class
2301 mep_preferred_reload_class (rtx x, enum reg_class rclass)
2303 switch (GET_CODE (x))
2305 case CONST_INT:
2306 if (INTVAL (x) >= 0x10000
2307 && INTVAL (x) < 0x01000000
2308 && (INTVAL (x) & 0xffff) != 0
2309 && reg_class_subset_p (TPREL_REGS, rclass))
2310 rclass = TPREL_REGS;
2311 break;
2313 case CONST:
2314 case SYMBOL_REF:
2315 case LABEL_REF:
2316 if (mep_section_tag (x) != 'f'
2317 && reg_class_subset_p (TPREL_REGS, rclass))
2318 rclass = TPREL_REGS;
2319 break;
2321 default:
2322 break;
2324 return rclass;
2327 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2328 moves, 4 for direct double-register moves, and 1000 for anything
2329 that requires a temporary register or temporary stack slot. */
2332 mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
2334 if (mep_have_copro_copro_moves_p
2335 && reg_class_subset_p (from, CR_REGS)
2336 && reg_class_subset_p (to, CR_REGS))
2338 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2339 return 4;
2340 return 2;
2342 if (reg_class_subset_p (from, CR_REGS)
2343 && reg_class_subset_p (to, CR_REGS))
2345 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2346 return 8;
2347 return 4;
2349 if (reg_class_subset_p (from, CR_REGS)
2350 || reg_class_subset_p (to, CR_REGS))
2352 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2353 return 4;
2354 return 2;
2356 if (mep_secondary_memory_needed (from, to, mode))
2357 return 1000;
2358 if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
2359 return 1000;
2361 if (GET_MODE_SIZE (mode) > 4)
2362 return 4;
2364 return 2;
2368 /* Functions to save and restore machine-specific function data. */
2370 static struct machine_function *
2371 mep_init_machine_status (void)
2373 struct machine_function *f;
2375 f = (struct machine_function *) ggc_alloc_cleared (sizeof (struct machine_function));
2377 return f;
2380 static rtx
2381 mep_allocate_initial_value (rtx reg)
2383 int rss;
2385 if (GET_CODE (reg) != REG)
2386 return NULL_RTX;
2388 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2389 return NULL_RTX;
2391 /* In interrupt functions, the "initial" values of $gp and $tp are
2392 provided by the prologue. They are not necessarily the same as
2393 the values that the caller was using. */
2394 if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
2395 if (mep_interrupt_p ())
2396 return NULL_RTX;
2398 if (! cfun->machine->reg_save_slot[REGNO(reg)])
2400 cfun->machine->reg_save_size += 4;
2401 cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
2404 rss = cfun->machine->reg_save_slot[REGNO(reg)];
2405 return gen_rtx_MEM (SImode, plus_constant (arg_pointer_rtx, -rss));
2409 mep_return_addr_rtx (int count)
2411 if (count != 0)
2412 return const0_rtx;
2414 return get_hard_reg_initial_val (Pmode, LP_REGNO);
2417 static rtx
2418 mep_tp_rtx (void)
2420 return get_hard_reg_initial_val (Pmode, TP_REGNO);
2423 static rtx
2424 mep_gp_rtx (void)
2426 return get_hard_reg_initial_val (Pmode, GP_REGNO);
2429 static bool
2430 mep_interrupt_p (void)
2432 if (cfun->machine->interrupt_handler == 0)
2434 int interrupt_handler
2435 = (lookup_attribute ("interrupt",
2436 DECL_ATTRIBUTES (current_function_decl))
2437 != NULL_TREE);
2438 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2440 return cfun->machine->interrupt_handler == 2;
2443 static bool
2444 mep_disinterrupt_p (void)
2446 if (cfun->machine->disable_interrupts == 0)
2448 int disable_interrupts
2449 = (lookup_attribute ("disinterrupt",
2450 DECL_ATTRIBUTES (current_function_decl))
2451 != NULL_TREE);
2452 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2454 return cfun->machine->disable_interrupts == 2;
2458 /* Frame/Epilog/Prolog Related. */
2460 static bool
2461 mep_reg_set_p (rtx reg, rtx insn)
2463 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2464 if (INSN_P (insn))
2466 if (FIND_REG_INC_NOTE (insn, reg))
2467 return true;
2468 insn = PATTERN (insn);
2471 if (GET_CODE (insn) == SET
2472 && GET_CODE (XEXP (insn, 0)) == REG
2473 && GET_CODE (XEXP (insn, 1)) == REG
2474 && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
2475 return false;
2477 return set_of (reg, insn) != NULL_RTX;
/* Cached answers for mep_call_saves_register, kept per-register in
   cfun->machine->reg_saved[].  (MEP_SAVES_MAYBE does not appear in the
   code visible in this file section.)  */
2481 #define MEP_SAVES_UNKNOWN 0
2482 #define MEP_SAVES_YES 1
2483 #define MEP_SAVES_MAYBE 2
2484 #define MEP_SAVES_NO 3
2486 static bool
2487 mep_reg_set_in_function (int regno)
2489 rtx reg, insn;
2491 if (mep_interrupt_p () && df_regs_ever_live_p(regno))
2492 return true;
2494 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2495 return true;
2497 push_topmost_sequence ();
2498 insn = get_insns ();
2499 pop_topmost_sequence ();
2501 if (!insn)
2502 return false;
2504 reg = gen_rtx_REG (SImode, regno);
2506 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2507 if (INSN_P (insn) && mep_reg_set_p (reg, insn))
2508 return true;
2509 return false;
2512 static bool
2513 mep_asm_without_operands_p (void)
2515 if (cfun->machine->asms_without_operands == 0)
2517 rtx insn;
2519 push_topmost_sequence ();
2520 insn = get_insns ();
2521 pop_topmost_sequence ();
2523 cfun->machine->asms_without_operands = 1;
2524 while (insn)
2526 if (INSN_P (insn)
2527 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2529 cfun->machine->asms_without_operands = 2;
2530 break;
2532 insn = NEXT_INSN (insn);
2536 return cfun->machine->asms_without_operands == 2;
2539 /* Interrupt functions save/restore every call-preserved register, and
2540 any call-used register it uses (or all if it calls any function,
2541 since they may get clobbered there too). Here we check to see
2542 which call-used registers need saving. */
/* True if R is one of the extra coprocessor control registers that an
   IVC2 interrupt handler must save.  The parameter is parenthesized in
   the expansion so the macro remains correct for expression arguments
   (it was previously used unparenthesized).  Note that R is evaluated
   several times, so it must not have side effects.  */
#define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
			    && ((r) == FIRST_CCR_REGNO + 1 \
				|| ((r) >= FIRST_CCR_REGNO + 8 && (r) <= FIRST_CCR_REGNO + 11) \
				|| ((r) >= FIRST_CCR_REGNO + 16 && (r) <= FIRST_CCR_REGNO + 31)))
/* Return true if an interrupt handler must save (and restore) hard
   register R.  Always false for non-interrupt functions.  The checks
   below are order-sensitive: earlier, more specific rules win.  */
2549 static bool
2550 mep_interrupt_saved_reg (int r)
2552 if (!mep_interrupt_p ())
2553 return false;
/* The prologue/epilogue code itself clobbers these temporaries.  */
2554 if (r == REGSAVE_CONTROL_TEMP
2555 || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
2556 return true;
/* An old-style asm without operands may touch nearly any register.  */
2557 if (mep_asm_without_operands_p ()
2558 && (!fixed_regs[r]
2559 || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
2560 || IVC2_ISAVED_REG (r)))
2561 return true;
2562 if (!current_function_is_leaf)
2563 /* Function calls mean we need to save $lp. */
2564 if (r == LP_REGNO || IVC2_ISAVED_REG (r))
2565 return true;
2566 if (!current_function_is_leaf || cfun->machine->doloop_tags > 0)
2567 /* The interrupt handler might use these registers for repeat blocks,
2568 or it might call a function that does so. */
2569 if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
2570 return true;
/* In a leaf handler, a dead call-used register needs no saving.  */
2571 if (current_function_is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
2572 return false;
2573 /* Functions we call might clobber these. */
2574 if (call_used_regs[r] && !fixed_regs[r])
2575 return true;
2576 /* Additional registers that need to be saved for IVC2. */
2577 if (IVC2_ISAVED_REG (r))
2578 return true;
2580 return false;
/* Return true if register R must be saved by the current function.
   The tri-state answer (MEP_SAVES_*) is cached per register in
   cfun->machine->reg_saved[]; once the frame layout is locked
   (frame_locked), the cached value is returned unchanged.  */
2583 static bool
2584 mep_call_saves_register (int r)
2586 if (! cfun->machine->frame_locked)
2588 int rv = MEP_SAVES_NO;
/* A save slot may already have been assigned (e.g. by
   mep_allocate_initial_value).  */
2590 if (cfun->machine->reg_save_slot[r])
2591 rv = MEP_SAVES_YES;
/* Profiling code implicitly clobbers $lp; see mep_epilogue_uses.  */
2592 else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2593 rv = MEP_SAVES_YES;
2594 else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
2595 rv = MEP_SAVES_YES;
2596 else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
2597 rv = MEP_SAVES_YES;
2598 else if (crtl->calls_eh_return && (r == 10 || r == 11))
2599 /* We need these to have stack slots so that they can be set during
2600 unwinding. */
2601 rv = MEP_SAVES_YES;
2602 else if (mep_interrupt_saved_reg (r))
2603 rv = MEP_SAVES_YES;
2604 cfun->machine->reg_saved[r] = rv;
2606 return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
2609 /* Return true if epilogue uses register REGNO. */
2611 bool
2612 mep_epilogue_uses (int regno)
2614 /* Since $lp is a call-saved register, the generic code will normally
2615 mark it used in the epilogue if it needs to be saved and restored.
2616 However, when profiling is enabled, the profiling code will implicitly
2617 clobber $11. This case has to be handled specially both here and in
2618 mep_call_saves_register. */
2619 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2620 return true;
2621 /* Interrupt functions save/restore pretty much everything. */
2622 return (reload_completed && mep_interrupt_saved_reg (regno));
2625 static int
2626 mep_reg_size (int regno)
2628 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2629 return 8;
2630 return 4;
2633 /* Worker function for TARGET_CAN_ELIMINATE. */
2635 bool
2636 mep_can_eliminate (const int from, const int to)
2638 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2639 ? ! frame_pointer_needed
2640 : true);
/* Compute the INITIAL_ELIMINATION_OFFSET between registers FROM and
   TO.  As a side effect (unless the frame is locked) this recomputes
   the cached reg_saved[] answers and the two 8-byte alignment fillers
   (regsave_filler, frame_filler) used by the prologue/epilogue.  */
2644 mep_elimination_offset (int from, int to)
2646 int reg_save_size;
2647 int i;
2648 int frame_size = get_frame_size () + crtl->outgoing_args_size;
2649 int total_size;
/* Invalidate the per-register cache so mep_call_saves_register
   recomputes from scratch.  */
2651 if (!cfun->machine->frame_locked)
2652 memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));
2654 /* We don't count arg_regs_to_save in the arg pointer offset, because
2655 gcc thinks the arg pointer has moved along with the saved regs.
2656 However, we do count it when we adjust $sp in the prologue. */
2657 reg_save_size = 0;
2658 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2659 if (mep_call_saves_register (i))
2660 reg_save_size += mep_reg_size (i);
/* Pad the register-save area to an 8-byte boundary.  */
2662 if (reg_save_size % 8)
2663 cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
2664 else
2665 cfun->machine->regsave_filler = 0;
2667 /* This is what our total stack adjustment looks like. */
2668 total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);
2670 if (total_size % 8)
2671 cfun->machine->frame_filler = 8 - (total_size % 8);
2672 else
2673 cfun->machine->frame_filler = 0;
2676 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
2677 return reg_save_size + cfun->machine->regsave_filler;
2679 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2680 return cfun->machine->frame_filler + frame_size;
2682 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2683 return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;
2685 gcc_unreachable ();
/* Mark X as frame-related and return it; shorthand used when
   emitting prologue instructions.  */
2688 static rtx
2689 F (rtx x)
2691 RTX_FRAME_RELATED_P (x) = 1;
2692 return x;
2695 /* Since the prologue/epilogue code is generated after optimization,
2696 we can't rely on gcc to split constants for us. So, this code
2697 captures all the ways to add a constant to a register in one logic
2698 chunk, including optimizing away insns we just don't need. This
2699 makes the prolog/epilog code easier to follow. */
/* Emit DEST = SRC + VALUE (all hard register numbers, SImode).  When
   MARK_FRAME is nonzero the emitted insns are flagged frame-related
   for DWARF CFI purposes.  Handles three cases: plain copy, small
   (16-bit signed) immediate add, and a big constant built in a
   temporary register.  */
2700 static void
2701 add_constant (int dest, int src, int value, int mark_frame)
2703 rtx insn;
2704 int hi, lo;
2706 if (src == dest && value == 0)
2707 return;
/* Pure register copy.  */
2709 if (value == 0)
2711 insn = emit_move_insn (gen_rtx_REG (SImode, dest),
2712 gen_rtx_REG (SImode, src));
2713 if (mark_frame)
2714 RTX_FRAME_RELATED_P(insn) = 1;
2715 return;
/* Constant fits in a 16-bit signed immediate add.  */
2718 if (value >= -32768 && value <= 32767)
2720 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2721 gen_rtx_REG (SImode, src),
2722 GEN_INT (value)));
2723 if (mark_frame)
2724 RTX_FRAME_RELATED_P(insn) = 1;
2725 return;
2728 /* Big constant, need to use a temp register. We use
2729 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2730 area is always small enough to directly add to). */
2732 hi = trunc_int_for_mode (value & 0xffff0000, SImode);
2733 lo = value & 0xffff;
2735 insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2736 GEN_INT (hi));
2738 if (lo)
2740 insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2741 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2742 GEN_INT (lo)));
2745 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2746 gen_rtx_REG (SImode, src),
2747 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
/* For the multi-insn sequence, attach a note describing the net
   effect so the unwinder sees a single DEST = DEST + VALUE.  */
2748 if (mark_frame)
2750 RTX_FRAME_RELATED_P(insn) = 1;
2751 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2752 gen_rtx_SET (SImode,
2753 gen_rtx_REG (SImode, dest),
2754 gen_rtx_PLUS (SImode,
2755 gen_rtx_REG (SImode, dest),
2756 GEN_INT (value))));
/* Return true if any insn in the current function mentions the stack
   pointer.  The empty-bodied for loop walks to the outermost pending
   sequence so the scan starts at the true head of the insn stream.  */
2760 static bool
2761 mep_function_uses_sp (void)
2763 rtx insn;
2764 struct sequence_stack *seq;
2765 rtx sp = gen_rtx_REG (SImode, SP_REGNO);
2767 insn = get_insns ();
/* Note: loop body is empty; it only advances INSN to the first insn
   of the outermost saved sequence.  */
2768 for (seq = crtl->emit.sequence_stack;
2769 seq;
2770 insn = seq->first, seq = seq->next);
2772 while (insn)
2774 if (mep_mentioned_p (insn, sp, 0))
2775 return true;
2776 insn = NEXT_INSN (insn);
2778 return false;
2781 /* Move SRC to DEST. Mark the move as being potentially dead if
2782 MAYBE_DEAD_P. */
/* Emit a move of SRC to DEST and return the insn.  MAYBE_DEAD_P is
   currently unused: the REG_MAYBE_DEAD annotation below is disabled
   (#if 0), so this is a plain emit_move_insn wrapper.  */
2784 static rtx
2785 maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2787 rtx insn = emit_move_insn (dest, src);
2788 #if 0
2789 if (maybe_dead_p)
2790 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2791 #endif
2792 return insn;
2795 /* Used for interrupt functions, which can't assume that $tp and $gp
2796 contain the correct pointers. */
2798 static void
2799 mep_reload_pointer (int regno, const char *symbol)
2801 rtx reg, sym;
2803 if (!df_regs_ever_live_p(regno) && current_function_is_leaf)
2804 return;
2806 reg = gen_rtx_REG (SImode, regno);
2807 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2808 emit_insn (gen_movsi_topsym_s (reg, sym));
2809 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2812 /* Assign save slots for any register not already saved. DImode
2813 registers go at the end of the reg save area; the rest go at the
2814 beginning. This is for alignment purposes. Returns true if a frame
2815 is really needed. */
/* Assign save slots for every saved register that lacks one.  4-byte
   registers grow the save area from the front; 8-byte (DImode)
   registers are placed from offset REG_SAVE_SIZE downward, for
   alignment.  Locks the frame layout and returns true if a stack
   frame is really needed.  */
2816 static bool
2817 mep_assign_save_slots (int reg_save_size)
2819 bool really_need_stack_frame = false;
2820 int di_ofs = 0;
2821 int i;
2823 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2824 if (mep_call_saves_register(i))
2826 int regsize = mep_reg_size (i);
/* $tp/$gp/$lp only force a frame if actually modified.  */
2828 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2829 || mep_reg_set_in_function (i))
2830 really_need_stack_frame = true;
2832 if (cfun->machine->reg_save_slot[i])
2833 continue;
2835 if (regsize < 8)
2837 cfun->machine->reg_save_size += regsize;
2838 cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
2840 else
2842 cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
2843 di_ofs += 8;
/* From here on, mep_call_saves_register returns cached answers.  */
2846 cfun->machine->frame_locked = 1;
2847 return really_need_stack_frame;
/* Expand the function prologue: adjust $sp, store every register that
   mep_call_saves_register reports, set up the frame pointer if
   needed, and reload $gp/$tp in interrupt handlers.  */
2850 void
2851 mep_expand_prologue (void)
2853 int i, rss, sp_offset = 0;
2854 int reg_save_size;
2855 int frame_size;
2856 int really_need_stack_frame;
2858 /* We must not allow register renaming in interrupt functions,
2859 because that invalidates the correctness of the set of call-used
2860 registers we're going to save/restore. */
2861 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2863 if (mep_disinterrupt_p ())
2864 emit_insn (gen_mep_disable_int ());
2866 cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;
2868 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2869 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2870 really_need_stack_frame = frame_size;
2872 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
/* For small frames, do the whole $sp adjustment up front so the
   register stores below get short offsets.  */
2874 sp_offset = reg_save_size;
2875 if (sp_offset + frame_size < 128)
2876 sp_offset += frame_size ;
2878 add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);
/* Store each saved register into its assigned slot.  */
2880 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2881 if (mep_call_saves_register(i))
2883 rtx mem;
2884 bool maybe_dead_p;
2885 enum machine_mode rmode;
2887 rss = cfun->machine->reg_save_slot[i];
2889 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2890 && (!mep_reg_set_in_function (i)
2891 && !mep_interrupt_p ()))
2892 continue;
2894 if (mep_reg_size (i) == 8)
2895 rmode = DImode;
2896 else
2897 rmode = SImode;
2899 /* If there is a pseudo associated with this register's initial value,
2900 reload might have already spilt it to the stack slot suggested by
2901 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2902 deleted as dead. */
2903 mem = gen_rtx_MEM (rmode,
2904 plus_constant (stack_pointer_rtx, sp_offset - rss));
2905 maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));
2907 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2908 F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
/* A 64-bit register that can't be stored directly: move each half
   through the core temporaries and store them separately, honoring
   endianness via BE.  */
2909 else if (rmode == DImode)
2911 rtx insn;
2912 int be = TARGET_BIG_ENDIAN ? 4 : 0;
2914 mem = gen_rtx_MEM (SImode,
2915 plus_constant (stack_pointer_rtx, sp_offset - rss + be));
2917 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2918 gen_rtx_REG (SImode, i),
2919 maybe_dead_p);
2920 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2921 gen_rtx_ZERO_EXTRACT (SImode,
2922 gen_rtx_REG (DImode, i),
2923 GEN_INT (32),
2924 GEN_INT (32)),
2925 maybe_dead_p);
2926 insn = maybe_dead_move (mem,
2927 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2928 maybe_dead_p);
2929 RTX_FRAME_RELATED_P (insn) = 1;
2931 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2932 gen_rtx_SET (VOIDmode,
2933 copy_rtx (mem),
2934 gen_rtx_REG (rmode, i)));
2935 mem = gen_rtx_MEM (SImode,
2936 plus_constant (stack_pointer_rtx, sp_offset - rss + (4-be)));
2937 insn = maybe_dead_move (mem,
2938 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2939 maybe_dead_p);
/* Other registers are staged through REGSAVE_CONTROL_TEMP.  */
2941 else
2943 rtx insn;
2944 maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2945 gen_rtx_REG (rmode, i),
2946 maybe_dead_p);
2947 insn = maybe_dead_move (mem,
2948 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2949 maybe_dead_p);
2950 RTX_FRAME_RELATED_P (insn) = 1;
2952 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2953 gen_rtx_SET (VOIDmode,
2954 copy_rtx (mem),
2955 gen_rtx_REG (rmode, i)));
2959 if (frame_pointer_needed)
2961 /* We've already adjusted down by sp_offset. Total $sp change
2962 is reg_save_size + frame_size. We want a net change here of
2963 just reg_save_size. */
2964 add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
/* Apply whatever part of the total adjustment is still pending.  */
2967 add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);
2969 if (mep_interrupt_p ())
2971 mep_reload_pointer(GP_REGNO, "__sdabase");
2972 mep_reload_pointer(TP_REGNO, "__tpbase");
/* Emit a human-readable description of the frame layout as assembler
   comments at the start of the function, and pick the printed name of
   the FP register ($fp vs $8).  HWI_LOCAL is the size of local
   variables.  Output is abbreviated when no debug info is wanted.  */
2976 static void
2977 mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2979 int local = hwi_local;
2980 int frame_size = local + crtl->outgoing_args_size;
2981 int reg_save_size;
2982 int ffill;
2983 int i, sp, skip;
2984 int sp_offset;
2985 int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
2987 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2988 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2989 sp_offset = reg_save_size + frame_size;
2991 ffill = cfun->machine->frame_filler;
2993 if (cfun->machine->mep_frame_pointer_needed)
2994 reg_names[FP_REGNO] = "$fp";
2995 else
2996 reg_names[FP_REGNO] = "$8";
2998 if (sp_offset == 0)
2999 return;
/* Without debug info, print a single summary line only.  */
3001 if (debug_info_level == DINFO_LEVEL_NONE)
3003 fprintf (file, "\t# frame: %d", sp_offset);
3004 if (reg_save_size)
3005 fprintf (file, " %d regs", reg_save_size);
3006 if (local)
3007 fprintf (file, " %d locals", local);
3008 if (crtl->outgoing_args_size)
3009 fprintf (file, " %d args", crtl->outgoing_args_size);
3010 fprintf (file, "\n");
3011 return;
3014 fprintf (file, "\t#\n");
3015 fprintf (file, "\t# Initial Frame Information:\n");
3016 if (sp_offset || !frame_pointer_needed)
3017 fprintf (file, "\t# Entry ---------- 0\n");
3019 /* Sort registers by save slots, so they're printed in the order
3020 they appear in memory, not the order they're saved in. */
3021 for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
3022 slot_map[si] = si;
/* Simple O(n^2) selection sort; FIRST_PSEUDO_REGISTER is small.  */
3023 for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
3024 for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
3025 if (cfun->machine->reg_save_slot[slot_map[si]]
3026 > cfun->machine->reg_save_slot[slot_map[sj]])
3028 int t = slot_map[si];
3029 slot_map[si] = slot_map[sj];
3030 slot_map[sj] = t;
3033 sp = 0;
3034 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3036 int rsize;
3037 int r = slot_map[i];
3038 int rss = cfun->machine->reg_save_slot[r];
3040 if (!mep_call_saves_register (r))
3041 continue;
/* Mirror the prologue's filter: unmodified $tp/$gp/$lp are not
   actually stored except in interrupt handlers.  */
3043 if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
3044 && (!mep_reg_set_in_function (r)
3045 && !mep_interrupt_p ()))
3046 continue;
3048 rsize = mep_reg_size(r);
3049 skip = rss - (sp+rsize);
3050 if (skip)
3051 fprintf (file, "\t# %3d bytes for alignment\n", skip);
3052 fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
3053 rsize, reg_names[r], sp_offset - rss);
3054 sp = rss;
3057 skip = reg_save_size - sp;
3058 if (skip)
3059 fprintf (file, "\t# %3d bytes for alignment\n", skip);
3061 if (frame_pointer_needed)
3062 fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
3063 if (local)
3064 fprintf (file, "\t# %3d bytes for local vars\n", local);
3065 if (ffill)
3066 fprintf (file, "\t# %3d bytes for alignment\n", ffill);
3067 if (crtl->outgoing_args_size)
3068 fprintf (file, "\t# %3d bytes for outgoing args\n",
3069 crtl->outgoing_args_size);
3070 fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
3071 fprintf (file, "\t#\n");
/* Epilogue-expansion mode flags: the first is set while expanding an
   eh_return epilogue (which must not restore $lp), the second while
   expanding a sibcall epilogue.  */
3075 static int mep_prevent_lp_restore = 0;
3076 static int mep_sibcall_epilogue = 0;
/* Expand the function epilogue: restore saved registers, release the
   frame, and emit the appropriate return (eh_return_internal, reti for
   interrupt handlers, or an indirect jump through the restored return
   address).  Behavior is modified by the mep_prevent_lp_restore and
   mep_sibcall_epilogue flags above.  */
3078 void
3079 mep_expand_epilogue (void)
3081 int i, sp_offset = 0;
3082 int reg_save_size = 0;
3083 int frame_size;
3084 int lp_temp = LP_REGNO, lp_slot = -1;
3085 int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
3086 int interrupt_handler = mep_interrupt_p ();
3088 if (profile_arc_flag == 2)
3089 emit_insn (gen_mep_bb_trace_ret ());
3091 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
3092 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
3094 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
3096 if (frame_pointer_needed)
3098 /* If we have a frame pointer, we won't have a reliable stack
3099 pointer (alloca, you know), so rebase SP from FP */
3100 emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
3101 gen_rtx_REG (SImode, FP_REGNO));
3102 sp_offset = reg_save_size;
3104 else
3106 /* SP is right under our local variable space. Adjust it if
3107 needed. */
3108 sp_offset = reg_save_size + frame_size;
3109 if (sp_offset >= 128)
3111 add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
3112 sp_offset -= frame_size;
3116 /* This is backwards so that we restore the control and coprocessor
3117 registers before the temporary registers we use to restore
3118 them. */
3119 for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
3120 if (mep_call_saves_register (i))
3122 enum machine_mode rmode;
3123 int rss = cfun->machine->reg_save_slot[i];
3125 if (mep_reg_size (i) == 8)
3126 rmode = DImode;
3127 else
3128 rmode = SImode;
3130 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
3131 && !(mep_reg_set_in_function (i) || interrupt_handler))
3132 continue;
3133 if (mep_prevent_lp_restore && i == LP_REGNO)
3134 continue;
/* $10/$11 carry EH data; leave them alone in eh/normal returns.  */
3135 if (!mep_prevent_lp_restore
3136 && !interrupt_handler
3137 && (i == 10 || i == 11))
3138 continue;
3140 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
3141 emit_move_insn (gen_rtx_REG (rmode, i),
3142 gen_rtx_MEM (rmode,
3143 plus_constant (stack_pointer_rtx,
3144 sp_offset-rss)));
3145 else
3147 if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
3148 /* Defer this one so we can jump indirect rather than
3149 copying the RA to $lp and "ret". EH epilogues
3150 automatically skip this anyway. */
3151 lp_slot = sp_offset-rss;
3152 else
3154 emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
3155 gen_rtx_MEM (rmode,
3156 plus_constant (stack_pointer_rtx,
3157 sp_offset-rss)));
3158 emit_move_insn (gen_rtx_REG (rmode, i),
3159 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
3163 if (lp_slot != -1)
3165 /* Restore this one last so we know it will be in the temp
3166 register when we return by jumping indirectly via the temp. */
3167 emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
3168 gen_rtx_MEM (SImode,
3169 plus_constant (stack_pointer_rtx,
3170 lp_slot)));
3171 lp_temp = REGSAVE_CONTROL_TEMP;
/* Release the remainder of the frame.  */
3175 add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);
3177 if (crtl->calls_eh_return && mep_prevent_lp_restore)
3178 emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
3179 gen_rtx_REG (SImode, SP_REGNO),
3180 cfun->machine->eh_stack_adjust));
3182 if (mep_sibcall_epilogue)
3183 return;
3185 if (mep_disinterrupt_p ())
3186 emit_insn (gen_mep_enable_int ());
3188 if (mep_prevent_lp_restore)
3190 emit_jump_insn (gen_eh_return_internal ());
3191 emit_barrier ();
3193 else if (interrupt_handler)
3194 emit_jump_insn (gen_mep_reti ());
3195 else
3196 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
3199 void
3200 mep_expand_eh_return (rtx *operands)
3202 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3204 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3205 emit_move_insn (ra, operands[0]);
3206 operands[0] = ra;
3209 emit_insn (gen_eh_epilogue (operands[0]));
/* Expand the epilogue for an eh_return: record the stack-adjustment
   register ($0 here) and expand a normal epilogue with $lp
   restoration suppressed.  */
3212 void
3213 mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
3215 cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
3216 mep_prevent_lp_restore = 1;
3217 mep_expand_epilogue ();
3218 mep_prevent_lp_restore = 0;
/* Expand the epilogue used before a sibling call: same as the normal
   epilogue but without the final return jump (see the
   mep_sibcall_epilogue early-exit in mep_expand_epilogue).  */
3221 void
3222 mep_expand_sibcall_epilogue (void)
3224 mep_sibcall_epilogue = 1;
3225 mep_expand_epilogue ();
3226 mep_sibcall_epilogue = 0;
3229 static bool
3230 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3232 if (decl == NULL)
3233 return false;
3235 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3236 return false;
3238 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3239 if (mep_interrupt_p () || mep_disinterrupt_p ())
3240 return false;
3242 return true;
3246 mep_return_stackadj_rtx (void)
3248 return gen_rtx_REG (SImode, 10);
3252 mep_return_handler_rtx (void)
3254 return gen_rtx_REG (SImode, LP_REGNO);
/* Emit the -p profiling stub: preserve $0 and $lp on the stack around
   a call to __mep_mcount, then restore them.  The assembler text must
   stay exactly as is.  */
3257 void
3258 mep_function_profiler (FILE *file)
3260 /* Always right at the beginning of the function. */
3261 fprintf (file, "\t# mep function profiler\n");
3262 fprintf (file, "\tadd\t$sp, -8\n");
3263 fprintf (file, "\tsw\t$0, ($sp)\n");
3264 fprintf (file, "\tldc\t$0, $lp\n");
3265 fprintf (file, "\tsw\t$0, 4($sp)\n");
3266 fprintf (file, "\tbsr\t__mep_mcount\n");
3267 fprintf (file, "\tlw\t$0, 4($sp)\n");
3268 fprintf (file, "\tstc\t$0, $lp\n");
3269 fprintf (file, "\tlw\t$0, ($sp)\n");
3270 fprintf (file, "\tadd\t$sp, 8\n\n");
/* Output the block-profiling return stub: save $0 and $lp, call
   __bb_trace_ret, restore.  Returns "" so it can be used directly as
   an insn output template.  */
3273 const char *
3274 mep_emit_bb_trace_ret (void)
3276 fprintf (asm_out_file, "\t# end of block profiling\n");
3277 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3278 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3279 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3280 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3281 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3282 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3283 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3284 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3285 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3286 return "";
/* The SAVE/RESTORE helper macros are not needed past this point;
   retire them to keep the macro namespace clean.  */
3289 #undef SAVE
3290 #undef RESTORE
3292 /* Operand Printing. */
3294 void
3295 mep_print_operand_address (FILE *stream, rtx address)
3297 if (GET_CODE (address) == MEM)
3298 address = XEXP (address, 0);
3299 else
3300 /* cf: gcc.dg/asm-4.c. */
3301 gcc_assert (GET_CODE (address) == REG);
3303 mep_print_operand (stream, address, 0);
/* Table driving mep_print_operand.  CODE is the %-letter (0 matches
   any), PATTERN is matched against the string built by
   encode_pattern, and FORMAT says what to print: digits index into
   patternr[], '\\' escapes the next character, other characters are
   emitted literally.  */
3306 static struct
3308 char code;
3309 const char *pattern;
3310 const char *format;
3312 const conversions[] =
3314 { 0, "r", "0" },
3315 { 0, "m+ri", "3(2)" },
3316 { 0, "mr", "(1)" },
3317 { 0, "ms", "(1)" },
3318 { 0, "ml", "(1)" },
3319 { 0, "mLrs", "%lo(3)(2)" },
3320 { 0, "mLr+si", "%lo(4+5)(2)" },
3321 { 0, "m+ru2s", "%tpoff(5)(2)" },
3322 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3323 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3324 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3325 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3326 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3327 { 0, "mi", "(1)" },
3328 { 0, "m+si", "(2+3)" },
3329 { 0, "m+li", "(2+3)" },
3330 { 0, "i", "0" },
3331 { 0, "s", "0" },
3332 { 0, "+si", "1+2" },
3333 { 0, "+u2si", "%tpoff(3+4)" },
3334 { 0, "+u3si", "%sdaoff(3+4)" },
3335 { 0, "l", "0" },
3336 { 'b', "i", "0" },
3337 { 'B', "i", "0" },
3338 { 'U', "i", "0" },
3339 { 'h', "i", "0" },
3340 { 'h', "Hs", "%hi(1)" },
3341 { 'I', "i", "0" },
3342 { 'I', "u2s", "%tpoff(2)" },
3343 { 'I', "u3s", "%sdaoff(2)" },
3344 { 'I', "+u2si", "%tpoff(3+4)" },
3345 { 'I', "+u3si", "%sdaoff(3+4)" },
3346 { 'J', "i", "0" },
3347 { 'P', "mr", "(1\\+),\\0" },
3348 { 'x', "i", "0" },
/* Sentinel terminating the table.  */
3349 { 0, 0, 0 }
3352 static int
3353 unique_bit_in (HOST_WIDE_INT i)
3355 switch (i & 0xff)
3357 case 0x01: case 0xfe: return 0;
3358 case 0x02: case 0xfd: return 1;
3359 case 0x04: case 0xfb: return 2;
3360 case 0x08: case 0xf7: return 3;
3361 case 0x10: case 0x7f: return 4;
3362 case 0x20: case 0xbf: return 5;
3363 case 0x40: case 0xdf: return 6;
3364 case 0x80: case 0xef: return 7;
3365 default:
3366 gcc_unreachable ();
3370 static int
3371 bit_size_for_clip (HOST_WIDE_INT i)
3373 int rv;
3375 for (rv = 0; rv < 31; rv ++)
3376 if (((HOST_WIDE_INT) 1 << rv) > i)
3377 return rv + 1;
3378 gcc_unreachable ();
3381 /* Print an operand to a assembler instruction. */
/* Print operand X to FILE under %-letter CODE.  The special codes
   '<', 'L' and 'M' are handled directly; everything else goes through
   encode_pattern and the conversions[] table above.  */
3383 void
3384 mep_print_operand (FILE *file, rtx x, int code)
3386 int i, j;
3387 const char *real_name;
3389 if (code == '<')
3391 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3392 we're using, then skip over the "mep_" part of its name. */
3393 const struct cgen_insn *insn;
3395 if (mep_get_move_insn (mep_cmov, &insn))
3396 fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
3397 else
3398 mep_intrinsic_unavailable (mep_cmov);
3399 return;
/* %L: print the bit-op suffix for AND/IOR/XOR.  */
3401 if (code == 'L')
3403 switch (GET_CODE (x))
3405 case AND:
3406 fputs ("clr", file);
3407 return;
3408 case IOR:
3409 fputs ("set", file);
3410 return;
3411 case XOR:
3412 fputs ("not", file);
3413 return;
3414 default:
3415 output_operand_lossage ("invalid %%L code");
3418 if (code == 'M')
3420 /* Print the second operand of a CR <- CR move. If we're using
3421 a two-operand instruction (i.e., a real cmov), then just print
3422 the operand normally. If we're using a "reg, reg, immediate"
3423 instruction such as caddi3, print the operand followed by a
3424 zero field. If we're using a three-register instruction,
3425 print the operand twice. */
3426 const struct cgen_insn *insn;
3428 mep_print_operand (file, x, 0);
3429 if (mep_get_move_insn (mep_cmov, &insn)
3430 && insn_data[insn->icode].n_operands == 3)
3432 fputs (", ", file);
3433 if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
3434 mep_print_operand (file, x, 0);
3435 else
3436 mep_print_operand (file, const0_rtx, 0);
3438 return;
/* Generic path: classify X into the global `pattern'/patternr[]
   buffers, then find the matching conversions[] entry.  */
3441 encode_pattern (x);
3442 for (i = 0; conversions[i].pattern; i++)
3443 if (conversions[i].code == code
3444 && strcmp(conversions[i].pattern, pattern) == 0)
3446 for (j = 0; conversions[i].format[j]; j++)
/* '\\' escapes the next format character.  */
3447 if (conversions[i].format[j] == '\\')
3449 fputc (conversions[i].format[j+1], file);
3450 j++;
/* A digit prints the corresponding captured sub-rtx.  */
3452 else if (ISDIGIT(conversions[i].format[j]))
3454 rtx r = patternr[conversions[i].format[j] - '0'];
3455 switch (GET_CODE (r))
3457 case REG:
3458 fprintf (file, "%s", reg_names [REGNO (r)]);
3459 break;
3460 case CONST_INT:
3461 switch (code)
3463 case 'b':
3464 fprintf (file, "%d", unique_bit_in (INTVAL (r)));
3465 break;
3466 case 'B':
3467 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
3468 break;
3469 case 'h':
3470 fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
3471 break;
3472 case 'U':
3473 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
3474 break;
3475 case 'J':
3476 fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
3477 break;
3478 case 'x':
3479 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3480 && !(INTVAL (r) & 0xff))
3481 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
3482 else
3483 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3484 break;
3485 case 'I':
3486 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3487 && conversions[i].format[j+1] == 0)
3489 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
3490 fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
3492 else
3493 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3494 break;
3495 default:
3496 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3497 break;
3499 break;
3500 case CONST_DOUBLE:
3501 fprintf(file, "[const_double 0x%lx]",
3502 (unsigned long) CONST_DOUBLE_HIGH(r));
3503 break;
3504 case SYMBOL_REF:
3505 real_name = TARGET_STRIP_NAME_ENCODING (XSTR (r, 0));
3506 assemble_name (file, real_name);
3507 break;
3508 case LABEL_REF:
3509 output_asm_label (r);
3510 break;
3511 default:
3512 fprintf (stderr, "don't know how to print this operand:");
3513 debug_rtx (r);
3514 gcc_unreachable ();
3517 else
/* Suppress a '+' before a negative constant, which already
   prints its own '-'.  */
3519 if (conversions[i].format[j] == '+'
3520 && (!code || code == 'I')
3521 && ISDIGIT (conversions[i].format[j+1])
3522 && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
3523 && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
3524 continue;
3525 fputc(conversions[i].format[j], file);
3527 break;
3529 if (!conversions[i].pattern)
3531 error ("unconvertible operand %c %qs", code?code:'-', pattern);
3532 debug_rtx(x);
3535 return;
3538 void
3539 mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
3540 int noperands ATTRIBUTE_UNUSED)
3542 /* Despite the fact that MeP is perfectly capable of branching and
3543 doing something else in the same bundle, gcc does jump
3544 optimization *after* scheduling, so we cannot trust the bundling
3545 flags on jump instructions. */
3546 if (GET_MODE (insn) == BImode
3547 && get_attr_slots (insn) != SLOTS_CORE)
3548 fputc ('+', asm_out_file);
3551 /* Function args in registers. */
3553 static void
3554 mep_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
3555 enum machine_mode mode ATTRIBUTE_UNUSED,
3556 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3557 int second_time ATTRIBUTE_UNUSED)
3559 int nsave = 4 - (cum->nregs + 1);
3561 if (nsave > 0)
3562 cfun->machine->arg_regs_to_save = nsave;
3563 *pretend_size = nsave * 4;
3566 static int
3567 bytesize (const_tree type, enum machine_mode mode)
3569 if (mode == BLKmode)
3570 return int_size_in_bytes (type);
3571 return GET_MODE_SIZE (mode);
/* Implement EXPAND_BUILTIN_SAVEREGS: spill the unnamed-argument
   registers to a stack buffer and return its address.  For IVC2 the
   buffer also holds the 64-bit coprocessor argument registers after
   the (8-byte aligned) core registers.  */
3574 static rtx
3575 mep_expand_builtin_saveregs (void)
3577 int bufsize, i, ns;
3578 rtx regbuf;
3580 ns = cfun->machine->arg_regs_to_save;
3581 if (TARGET_IVC2)
3583 bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
3584 regbuf = assign_stack_local (SImode, bufsize, 64);
3586 else
3588 bufsize = ns * 4;
3589 regbuf = assign_stack_local (SImode, bufsize, 32);
/* Core registers $1..$4: the last NS of them are unnamed.  */
3592 move_block_from_reg (5-ns, regbuf, ns);
3594 if (TARGET_IVC2)
3596 rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
3597 int ofs = 8 * ((ns+1)/2);
3599 for (i=0; i<ns; i++)
/* Coprocessor registers start at hard regno 49 + (4-ns).  */
3601 int rn = (4-ns) + i + 49;
3602 rtx ptr;
3604 ptr = offset_address (tmp, GEN_INT (ofs), 2);
3605 emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
3606 ofs += 8;
3609 return XEXP (regbuf, 0);
3612 #define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
3614 static tree
3615 mep_build_builtin_va_list (void)
3617 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3618 tree record;
3621 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3623 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3624 get_identifier ("__va_next_gp"), ptr_type_node);
3625 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3626 get_identifier ("__va_next_gp_limit"),
3627 ptr_type_node);
3628 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3629 ptr_type_node);
3630 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3631 ptr_type_node);
3633 DECL_FIELD_CONTEXT (f_next_gp) = record;
3634 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3635 DECL_FIELD_CONTEXT (f_next_cop) = record;
3636 DECL_FIELD_CONTEXT (f_next_stack) = record;
3638 TYPE_FIELDS (record) = f_next_gp;
3639 TREE_CHAIN (f_next_gp) = f_next_gp_limit;
3640 TREE_CHAIN (f_next_gp_limit) = f_next_cop;
3641 TREE_CHAIN (f_next_cop) = f_next_stack;
3643 layout_type (record);
3645 return record;
3648 static void
3649 mep_expand_va_start (tree valist, rtx nextarg)
3651 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3652 tree next_gp, next_gp_limit, next_cop, next_stack;
3653 tree t, u;
3654 int ns;
3656 ns = cfun->machine->arg_regs_to_save;
3658 f_next_gp = TYPE_FIELDS (va_list_type_node);
3659 f_next_gp_limit = TREE_CHAIN (f_next_gp);
3660 f_next_cop = TREE_CHAIN (f_next_gp_limit);
3661 f_next_stack = TREE_CHAIN (f_next_cop);
3663 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3664 NULL_TREE);
3665 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3666 valist, f_next_gp_limit, NULL_TREE);
3667 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3668 NULL_TREE);
3669 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3670 valist, f_next_stack, NULL_TREE);
3672 /* va_list.next_gp = expand_builtin_saveregs (); */
3673 u = make_tree (sizetype, expand_builtin_saveregs ());
3674 u = fold_convert (ptr_type_node, u);
3675 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3676 TREE_SIDE_EFFECTS (t) = 1;
3677 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3679 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3680 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
3681 size_int (4 * ns));
3682 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3683 TREE_SIDE_EFFECTS (t) = 1;
3684 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3686 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
3687 size_int (8 * ((ns+1)/2)));
3688 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3689 t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3690 TREE_SIDE_EFFECTS (t) = 1;
3691 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3693 /* va_list.next_stack = nextarg; */
3694 u = make_tree (ptr_type_node, nextarg);
3695 t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3696 TREE_SIDE_EFFECTS (t) = 1;
3697 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3700 static tree
3701 mep_gimplify_va_arg_expr (tree valist, tree type,
3702 tree *pre_p, tree *post_p ATTRIBUTE_UNUSED)
3704 HOST_WIDE_INT size, rsize;
3705 bool by_reference, ivc2_vec;
3706 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3707 tree next_gp, next_gp_limit, next_cop, next_stack;
3708 tree label_sover, label_selse;
3709 tree tmp, res_addr;
3711 ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);
3713 size = int_size_in_bytes (type);
3714 by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);
3716 if (by_reference)
3718 type = build_pointer_type (type);
3719 size = 4;
3721 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
3723 f_next_gp = TYPE_FIELDS (va_list_type_node);
3724 f_next_gp_limit = TREE_CHAIN (f_next_gp);
3725 f_next_cop = TREE_CHAIN (f_next_gp_limit);
3726 f_next_stack = TREE_CHAIN (f_next_cop);
3728 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3729 NULL_TREE);
3730 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3731 valist, f_next_gp_limit, NULL_TREE);
3732 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3733 NULL_TREE);
3734 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3735 valist, f_next_stack, NULL_TREE);
3737 /* if f_next_gp < f_next_gp_limit
3738 IF (VECTOR_P && IVC2)
3739 val = *f_next_cop;
3740 ELSE
3741 val = *f_next_gp;
3742 f_next_gp += 4;
3743 f_next_cop += 8;
3744 else
3745 label_selse:
3746 val = *f_next_stack;
3747 f_next_stack += rsize;
3748 label_sover:
3751 label_sover = create_artificial_label (UNKNOWN_LOCATION);
3752 label_selse = create_artificial_label (UNKNOWN_LOCATION);
3753 res_addr = create_tmp_var (ptr_type_node, NULL);
3755 tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
3756 unshare_expr (next_gp_limit));
3757 tmp = build3 (COND_EXPR, void_type_node, tmp,
3758 build1 (GOTO_EXPR, void_type_node,
3759 unshare_expr (label_selse)),
3760 NULL_TREE);
3761 gimplify_and_add (tmp, pre_p);
3763 if (ivc2_vec)
3765 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
3766 gimplify_and_add (tmp, pre_p);
3768 else
3770 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
3771 gimplify_and_add (tmp, pre_p);
3774 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3775 unshare_expr (next_gp), size_int (4));
3776 gimplify_assign (unshare_expr (next_gp), tmp, pre_p);
3778 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3779 unshare_expr (next_cop), size_int (8));
3780 gimplify_assign (unshare_expr (next_cop), tmp, pre_p);
3782 tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
3783 gimplify_and_add (tmp, pre_p);
3785 /* - - */
3787 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
3788 gimplify_and_add (tmp, pre_p);
3790 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
3791 gimplify_and_add (tmp, pre_p);
3793 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3794 unshare_expr (next_stack), size_int (rsize));
3795 gimplify_assign (unshare_expr (next_stack), tmp, pre_p);
3797 /* - - */
3799 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
3800 gimplify_and_add (tmp, pre_p);
3802 res_addr = fold_convert (build_pointer_type (type), res_addr);
3804 if (by_reference)
3805 res_addr = build_va_arg_indirect_ref (res_addr);
3807 return build_va_arg_indirect_ref (res_addr);
3810 void
3811 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3812 rtx libname ATTRIBUTE_UNUSED,
3813 tree fndecl ATTRIBUTE_UNUSED)
3815 pcum->nregs = 0;
3817 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3818 pcum->vliw = 1;
3819 else
3820 pcum->vliw = 0;
3824 mep_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
3825 tree type ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
3827 /* VOIDmode is a signal for the backend to pass data to the call
3828 expander via the second operand to the call pattern. We use
3829 this to determine whether to use "jsr" or "jsrv". */
3830 if (mode == VOIDmode)
3831 return GEN_INT (cum.vliw);
3833 /* If we havn't run out of argument registers, return the next. */
3834 if (cum.nregs < 4)
3836 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3837 return gen_rtx_REG (mode, cum.nregs + 49);
3838 else
3839 return gen_rtx_REG (mode, cum.nregs + 1);
3842 /* Otherwise the argument goes on the stack. */
3843 return NULL_RTX;
3846 static bool
3847 mep_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
3848 enum machine_mode mode,
3849 const_tree type,
3850 bool named ATTRIBUTE_UNUSED)
3852 int size = bytesize (type, mode);
3854 /* This is non-obvious, but yes, large values passed after we've run
3855 out of registers are *still* passed by reference - we put the
3856 address of the parameter on the stack, as well as putting the
3857 parameter itself elsewhere on the stack. */
3859 if (size <= 0 || size > 8)
3860 return true;
3861 if (size <= 4)
3862 return false;
3863 if (TARGET_IVC2 && cum->nregs < 4 && type != NULL_TREE && VECTOR_TYPE_P (type))
3864 return false;
3865 return true;
3868 void
3869 mep_arg_advance (CUMULATIVE_ARGS *pcum,
3870 enum machine_mode mode ATTRIBUTE_UNUSED,
3871 tree type ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
3873 pcum->nregs += 1;
3876 bool
3877 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3879 int size = bytesize (type, BLKmode);
3880 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3881 return size > 0 && size <= 8 ? 0 : 1;
3882 return size > 0 && size <= 4 ? 0 : 1;
/* Implement TARGET_NARROW_VOLATILE_BITFIELD: always narrow volatile
   bitfield accesses to the declared field width.  (The original body
   had an unreachable "return false;" after "return true;" -- dead
   code, removed.)  */
static bool
mep_narrow_volatile_bitfield (void)
{
  return true;
}
3892 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3895 mep_function_value (tree type, tree func ATTRIBUTE_UNUSED)
3897 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3898 return gen_rtx_REG (TYPE_MODE (type), 48);
3899 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3902 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3905 mep_libcall_value (enum machine_mode mode)
3907 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
/* Handle pipeline hazards.  */

/* Opcode classes that participate in the STC hazard, and their
   printable names (indexed by op_num) for the hazard comment.  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* Class of the previously emitted opcode; see mep_asm_output_opcode.  */
static int prev_opcode = 0;
3917 /* This isn't as optimal as it could be, because we don't know what
3918 control register the STC opcode is storing in. We only need to add
3919 the nop if it's the relevent register, but we add it for irrelevent
3920 registers also. */
3922 void
3923 mep_asm_output_opcode (FILE *file, const char *ptr)
3925 int this_opcode = op_none;
3926 const char *hazard = 0;
3928 switch (*ptr)
3930 case 'f':
3931 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3932 this_opcode = op_fsft;
3933 break;
3934 case 'r':
3935 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3936 this_opcode = op_ret;
3937 break;
3938 case 's':
3939 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3940 this_opcode = op_stc;
3941 break;
3944 if (prev_opcode == op_stc && this_opcode == op_fsft)
3945 hazard = "nop";
3946 if (prev_opcode == op_stc && this_opcode == op_ret)
3947 hazard = "nop";
3949 if (hazard)
3950 fprintf(file, "%s\t# %s-%s hazard\n\t",
3951 hazard, opnames[prev_opcode], opnames[this_opcode]);
3953 prev_opcode = this_opcode;
3956 /* Handle attributes. */
3958 static tree
3959 mep_validate_based_tiny (tree *node, tree name, tree args,
3960 int flags ATTRIBUTE_UNUSED, bool *no_add)
3962 if (TREE_CODE (*node) != VAR_DECL
3963 && TREE_CODE (*node) != POINTER_TYPE
3964 && TREE_CODE (*node) != TYPE_DECL)
3966 warning (0, "%qE attribute only applies to variables", name);
3967 *no_add = true;
3969 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3971 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3973 warning (0, "address region attributes not allowed with auto storage class");
3974 *no_add = true;
3976 /* Ignore storage attribute of pointed to variable: char __far * x; */
3977 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3979 warning (0, "address region attributes on pointed-to types ignored");
3980 *no_add = true;
3984 return NULL_TREE;
3987 static int
3988 mep_multiple_address_regions (tree list, bool check_section_attr)
3990 tree a;
3991 int count_sections = 0;
3992 int section_attr_count = 0;
3994 for (a = list; a; a = TREE_CHAIN (a))
3996 if (is_attribute_p ("based", TREE_PURPOSE (a))
3997 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3998 || is_attribute_p ("near", TREE_PURPOSE (a))
3999 || is_attribute_p ("far", TREE_PURPOSE (a))
4000 || is_attribute_p ("io", TREE_PURPOSE (a)))
4001 count_sections ++;
4002 if (check_section_attr)
4003 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
4006 if (check_section_attr)
4007 return section_attr_count;
4008 else
4009 return count_sections;
/* Fetch the attribute list for DECL: type attributes for types,
   otherwise the decl's own attributes, falling back to its type's.
   NOTE: DECL is evaluated more than once -- do not pass an expression
   with side effects.  The expansion is fully parenthesized (the
   original was not, a precedence hazard at use sites).  */
#define MEP_ATTRIBUTES(decl)					\
  ((TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl)			\
   : DECL_ATTRIBUTES (decl)					\
     ? (DECL_ATTRIBUTES (decl))					\
     : TYPE_ATTRIBUTES (TREE_TYPE (decl)))
4018 static tree
4019 mep_validate_near_far (tree *node, tree name, tree args,
4020 int flags ATTRIBUTE_UNUSED, bool *no_add)
4022 if (TREE_CODE (*node) != VAR_DECL
4023 && TREE_CODE (*node) != FUNCTION_DECL
4024 && TREE_CODE (*node) != METHOD_TYPE
4025 && TREE_CODE (*node) != POINTER_TYPE
4026 && TREE_CODE (*node) != TYPE_DECL)
4028 warning (0, "%qE attribute only applies to variables and functions",
4029 name);
4030 *no_add = true;
4032 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
4034 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
4036 warning (0, "address region attributes not allowed with auto storage class");
4037 *no_add = true;
4039 /* Ignore storage attribute of pointed to variable: char __far * x; */
4040 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
4042 warning (0, "address region attributes on pointed-to types ignored");
4043 *no_add = true;
4046 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
4048 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4049 name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
4050 DECL_ATTRIBUTES (*node) = NULL_TREE;
4052 return NULL_TREE;
4055 static tree
4056 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4057 int flags ATTRIBUTE_UNUSED, bool *no_add)
4059 if (TREE_CODE (*node) != FUNCTION_DECL
4060 && TREE_CODE (*node) != METHOD_TYPE)
4062 warning (0, "%qE attribute only applies to functions", name);
4063 *no_add = true;
4065 return NULL_TREE;
4068 static tree
4069 mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4070 int flags ATTRIBUTE_UNUSED, bool *no_add)
4072 tree function_type;
4074 if (TREE_CODE (*node) != FUNCTION_DECL)
4076 warning (0, "%qE attribute only applies to functions", name);
4077 *no_add = true;
4078 return NULL_TREE;
4081 if (DECL_DECLARED_INLINE_P (*node))
4082 error ("cannot inline interrupt function %qE", DECL_NAME (*node));
4083 DECL_UNINLINABLE (*node) = 1;
4085 function_type = TREE_TYPE (*node);
4087 if (TREE_TYPE (function_type) != void_type_node)
4088 error ("interrupt function must have return type of void");
4090 if (TYPE_ARG_TYPES (function_type)
4091 && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
4092 || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
4093 error ("interrupt function must have no arguments");
4095 return NULL_TREE;
4098 static tree
4099 mep_validate_io_cb (tree *node, tree name, tree args,
4100 int flags ATTRIBUTE_UNUSED, bool *no_add)
4102 if (TREE_CODE (*node) != VAR_DECL)
4104 warning (0, "%qE attribute only applies to variables", name);
4105 *no_add = true;
4108 if (args != NULL_TREE)
4110 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
4111 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
4112 if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
4114 warning (0, "%qE attribute allows only an integer constant argument",
4115 name);
4116 *no_add = true;
4120 if (*no_add == false && !TARGET_IO_NO_VOLATILE)
4121 TREE_THIS_VOLATILE (*node) = 1;
4123 return NULL_TREE;
4126 static tree
4127 mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4128 int flags ATTRIBUTE_UNUSED, bool *no_add)
4130 if (TREE_CODE (*node) != FUNCTION_TYPE
4131 && TREE_CODE (*node) != FUNCTION_DECL
4132 && TREE_CODE (*node) != METHOD_TYPE
4133 && TREE_CODE (*node) != FIELD_DECL
4134 && TREE_CODE (*node) != TYPE_DECL)
4136 static int gave_pointer_note = 0;
4137 static int gave_array_note = 0;
4138 static const char * given_type = NULL;
4140 given_type = tree_code_name[TREE_CODE (*node)];
4141 if (TREE_CODE (*node) == POINTER_TYPE)
4142 given_type = "pointers";
4143 if (TREE_CODE (*node) == ARRAY_TYPE)
4144 given_type = "arrays";
4146 if (given_type)
4147 warning (0, "%qE attribute only applies to functions, not %s",
4148 name, given_type);
4149 else
4150 warning (0, "%qE attribute only applies to functions",
4151 name);
4152 *no_add = true;
4154 if (TREE_CODE (*node) == POINTER_TYPE
4155 && !gave_pointer_note)
4157 inform (input_location, "To describe a pointer to a VLIW function, use syntax like this:");
4158 inform (input_location, " typedef int (__vliw *vfuncptr) ();");
4159 gave_pointer_note = 1;
4162 if (TREE_CODE (*node) == ARRAY_TYPE
4163 && !gave_array_note)
4165 inform (input_location, "To describe an array of VLIW function pointers, use syntax like this:");
4166 inform (input_location, " typedef int (__vliw *vfuncptr[]) ();");
4167 gave_array_note = 1;
4170 if (!TARGET_VLIW)
4171 error ("VLIW functions are not allowed without a VLIW configuration");
4172 return NULL_TREE;
4175 static const struct attribute_spec mep_attribute_table[11] =
4177 /* name min max decl type func handler */
4178 { "based", 0, 0, false, false, false, mep_validate_based_tiny },
4179 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny },
4180 { "near", 0, 0, false, false, false, mep_validate_near_far },
4181 { "far", 0, 0, false, false, false, mep_validate_near_far },
4182 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt },
4183 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt },
4184 { "io", 0, 1, false, false, false, mep_validate_io_cb },
4185 { "cb", 0, 1, false, false, false, mep_validate_io_cb },
4186 { "vliw", 0, 0, false, true, false, mep_validate_vliw },
4187 { NULL, 0, 0, false, false, false, NULL }
4190 static bool
4191 mep_function_attribute_inlinable_p (const_tree callee)
4193 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4194 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4195 return (lookup_attribute ("disinterrupt", attrs) == 0
4196 && lookup_attribute ("interrupt", attrs) == 0);
4199 static bool
4200 mep_can_inline_p (tree caller, tree callee)
4202 if (TREE_CODE (callee) == ADDR_EXPR)
4203 callee = TREE_OPERAND (callee, 0);
4205 if (!mep_vliw_function_p (caller)
4206 && mep_vliw_function_p (callee))
4208 return false;
4210 return true;
4213 #define FUNC_CALL 1
4214 #define FUNC_DISINTERRUPT 2
4217 struct GTY(()) pragma_entry {
4218 int used;
4219 int flag;
4220 const char *funcname;
4222 typedef struct pragma_entry pragma_entry;
4224 /* Hash table of farcall-tagged sections. */
4225 static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4227 static int
4228 pragma_entry_eq (const void *p1, const void *p2)
4230 const pragma_entry *old = (const pragma_entry *) p1;
4231 const char *new_name = (const char *) p2;
4233 return strcmp (old->funcname, new_name) == 0;
4236 static hashval_t
4237 pragma_entry_hash (const void *p)
4239 const pragma_entry *old = (const pragma_entry *) p;
4240 return htab_hash_string (old->funcname);
4243 static void
4244 mep_note_pragma_flag (const char *funcname, int flag)
4246 pragma_entry **slot;
4248 if (!pragma_htab)
4249 pragma_htab = htab_create_ggc (31, pragma_entry_hash,
4250 pragma_entry_eq, NULL);
4252 slot = (pragma_entry **)
4253 htab_find_slot_with_hash (pragma_htab, funcname,
4254 htab_hash_string (funcname), INSERT);
4256 if (!*slot)
4258 *slot = GGC_NEW (pragma_entry);
4259 (*slot)->flag = 0;
4260 (*slot)->used = 0;
4261 (*slot)->funcname = ggc_strdup (funcname);
4263 (*slot)->flag |= flag;
4266 static bool
4267 mep_lookup_pragma_flag (const char *funcname, int flag)
4269 pragma_entry **slot;
4271 if (!pragma_htab)
4272 return false;
4274 if (funcname[0] == '@' && funcname[2] == '.')
4275 funcname += 3;
4277 slot = (pragma_entry **)
4278 htab_find_slot_with_hash (pragma_htab, funcname,
4279 htab_hash_string (funcname), NO_INSERT);
4280 if (slot && *slot && ((*slot)->flag & flag))
4282 (*slot)->used |= flag;
4283 return true;
4285 return false;
4288 bool
4289 mep_lookup_pragma_call (const char *funcname)
4291 return mep_lookup_pragma_flag (funcname, FUNC_CALL);
4294 void
4295 mep_note_pragma_call (const char *funcname)
4297 mep_note_pragma_flag (funcname, FUNC_CALL);
4300 bool
4301 mep_lookup_pragma_disinterrupt (const char *funcname)
4303 return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
4306 void
4307 mep_note_pragma_disinterrupt (const char *funcname)
4309 mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
4312 static int
4313 note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
4315 const pragma_entry *d = (const pragma_entry *)(*slot);
4317 if ((d->flag & FUNC_DISINTERRUPT)
4318 && !(d->used & FUNC_DISINTERRUPT))
4319 warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
4320 return 1;
4323 void
4324 mep_file_cleanups (void)
4326 if (pragma_htab)
4327 htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
4331 static int
4332 mep_attrlist_to_encoding (tree list, tree decl)
4334 if (mep_multiple_address_regions (list, false) > 1)
4336 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4337 TREE_PURPOSE (TREE_CHAIN (list)),
4338 DECL_NAME (decl),
4339 DECL_SOURCE_LINE (decl));
4340 TREE_CHAIN (list) = NULL_TREE;
4343 while (list)
4345 if (is_attribute_p ("based", TREE_PURPOSE (list)))
4346 return 'b';
4347 if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
4348 return 't';
4349 if (is_attribute_p ("near", TREE_PURPOSE (list)))
4350 return 'n';
4351 if (is_attribute_p ("far", TREE_PURPOSE (list)))
4352 return 'f';
4353 if (is_attribute_p ("io", TREE_PURPOSE (list)))
4355 if (TREE_VALUE (list)
4356 && TREE_VALUE (TREE_VALUE (list))
4357 && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
4359 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
4360 if (location >= 0
4361 && location <= 0x1000000)
4362 return 'i';
4364 return 'I';
4366 if (is_attribute_p ("cb", TREE_PURPOSE (list)))
4367 return 'c';
4368 list = TREE_CHAIN (list);
4370 if (TARGET_TF
4371 && TREE_CODE (decl) == FUNCTION_DECL
4372 && DECL_SECTION_NAME (decl) == 0)
4373 return 'f';
4374 return 0;
4377 static int
4378 mep_comp_type_attributes (const_tree t1, const_tree t2)
4380 int vliw1, vliw2;
4382 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4383 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4385 if (vliw1 != vliw2)
4386 return 0;
4388 return 1;
4391 static void
4392 mep_insert_attributes (tree decl, tree *attributes)
4394 int size;
4395 const char *secname = 0;
4396 tree attrib, attrlist;
4397 char encoding;
4399 if (TREE_CODE (decl) == FUNCTION_DECL)
4401 const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));
4403 if (mep_lookup_pragma_disinterrupt (funcname))
4405 attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
4406 *attributes = chainon (*attributes, attrib);
4410 if (TREE_CODE (decl) != VAR_DECL
4411 || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
4412 return;
4414 if (TREE_READONLY (decl) && TARGET_DC)
4415 /* -mdc means that const variables default to the near section,
4416 regardless of the size cutoff. */
4417 return;
4419 /* User specified an attribute, so override the default.
4420 Ignore storage attribute of pointed to variable. char __far * x; */
4421 if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
4423 if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
4424 TYPE_ATTRIBUTES (decl) = NULL_TREE;
4425 else if (DECL_ATTRIBUTES (decl) && *attributes)
4426 DECL_ATTRIBUTES (decl) = NULL_TREE;
4429 attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
4430 encoding = mep_attrlist_to_encoding (attrlist, decl);
4431 if (!encoding && TYPE_P (TREE_TYPE (decl)))
4433 attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
4434 encoding = mep_attrlist_to_encoding (attrlist, decl);
4436 if (encoding)
4438 /* This means that the declaration has a specific section
4439 attribute, so we should not apply the default rules. */
4441 if (encoding == 'i' || encoding == 'I')
4443 tree attr = lookup_attribute ("io", attrlist);
4444 if (attr
4445 && TREE_VALUE (attr)
4446 && TREE_VALUE (TREE_VALUE(attr)))
4448 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4449 static tree previous_value = 0;
4450 static int previous_location = 0;
4451 static tree previous_name = 0;
4453 /* We take advantage of the fact that gcc will reuse the
4454 same tree pointer when applying an attribute to a
4455 list of decls, but produce a new tree for attributes
4456 on separate source lines, even when they're textually
4457 identical. This is the behavior we want. */
4458 if (TREE_VALUE (attr) == previous_value
4459 && location == previous_location)
4461 warning(0, "__io address 0x%x is the same for %qE and %qE",
4462 location, previous_name, DECL_NAME (decl));
4464 previous_name = DECL_NAME (decl);
4465 previous_location = location;
4466 previous_value = TREE_VALUE (attr);
4469 return;
4473 /* Declarations of arrays can change size. Don't trust them. */
4474 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
4475 size = 0;
4476 else
4477 size = int_size_in_bytes (TREE_TYPE (decl));
4479 if (TARGET_RAND_TPGP && size <= 4 && size > 0)
4481 if (TREE_PUBLIC (decl)
4482 || DECL_EXTERNAL (decl)
4483 || TREE_STATIC (decl))
4485 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
4486 int key = 0;
4488 while (*name)
4489 key += *name++;
4491 switch (key & 3)
4493 case 0:
4494 secname = "based";
4495 break;
4496 case 1:
4497 secname = "tiny";
4498 break;
4499 case 2:
4500 secname = "far";
4501 break;
4502 default:
4507 else
4509 if (size <= mep_based_cutoff && size > 0)
4510 secname = "based";
4511 else if (size <= mep_tiny_cutoff && size > 0)
4512 secname = "tiny";
4513 else if (TARGET_L)
4514 secname = "far";
4517 if (mep_const_section && TREE_READONLY (decl))
4519 if (strcmp (mep_const_section, "tiny") == 0)
4520 secname = "tiny";
4521 else if (strcmp (mep_const_section, "near") == 0)
4522 return;
4523 else if (strcmp (mep_const_section, "far") == 0)
4524 secname = "far";
4527 if (!secname)
4528 return;
4530 if (!mep_multiple_address_regions (*attributes, true)
4531 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
4533 attrib = build_tree_list (get_identifier (secname), NULL_TREE);
4535 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4536 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4537 and mep_validate_based_tiny. */
4538 DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
4542 static void
4543 mep_encode_section_info (tree decl, rtx rtl, int first)
4545 rtx rtlname;
4546 const char *oldname;
4547 const char *secname;
4548 char encoding;
4549 char *newname;
4550 tree idp;
4551 int maxsize;
4552 tree type;
4553 tree mep_attributes;
4555 if (! first)
4556 return;
4558 if (TREE_CODE (decl) != VAR_DECL
4559 && TREE_CODE (decl) != FUNCTION_DECL)
4560 return;
4562 rtlname = XEXP (rtl, 0);
4563 if (GET_CODE (rtlname) == SYMBOL_REF)
4564 oldname = XSTR (rtlname, 0);
4565 else if (GET_CODE (rtlname) == MEM
4566 && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
4567 oldname = XSTR (XEXP (rtlname, 0), 0);
4568 else
4569 gcc_unreachable ();
4571 type = TREE_TYPE (decl);
4572 if (type == error_mark_node)
4573 return;
4574 mep_attributes = MEP_ATTRIBUTES (decl);
4576 encoding = mep_attrlist_to_encoding (mep_attributes, decl);
4578 if (encoding)
4580 newname = (char *) alloca (strlen (oldname) + 4);
4581 sprintf (newname, "@%c.%s", encoding, oldname);
4582 idp = get_identifier (newname);
4583 XEXP (rtl, 0) =
4584 gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
4585 SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
4586 SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);
4588 switch (encoding)
4590 case 'b':
4591 maxsize = 128;
4592 secname = "based";
4593 break;
4594 case 't':
4595 maxsize = 65536;
4596 secname = "tiny";
4597 break;
4598 case 'n':
4599 maxsize = 0x1000000;
4600 secname = "near";
4601 break;
4602 default:
4603 maxsize = 0;
4604 secname = 0;
4605 break;
4607 if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
4609 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4610 oldname,
4611 (long) int_size_in_bytes (TREE_TYPE (decl)),
4612 secname,
4613 maxsize);
/* Implement TARGET_STRIP_NAME_ENCODING: skip any '*' user-label
   prefixes and "@x." section encodings from SYM, returning a pointer
   into the same string at the bare name.  */
const char *
mep_strip_name_encoding (const char *sym)
{
  for (;;)
    {
      if (sym[0] == '*')
	sym += 1;
      else if (sym[0] == '@' && sym[2] == '.')
	sym += 3;
      else
	return sym;
    }
}
4632 static section *
4633 mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
4634 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
4636 int readonly = 1;
4637 int encoding;
4639 switch (TREE_CODE (decl))
4641 case VAR_DECL:
4642 if (!TREE_READONLY (decl)
4643 || TREE_SIDE_EFFECTS (decl)
4644 || !DECL_INITIAL (decl)
4645 || (DECL_INITIAL (decl) != error_mark_node
4646 && !TREE_CONSTANT (DECL_INITIAL (decl))))
4647 readonly = 0;
4648 break;
4649 case CONSTRUCTOR:
4650 if (! TREE_CONSTANT (decl))
4651 readonly = 0;
4652 break;
4654 default:
4655 break;
4658 if (TREE_CODE (decl) == FUNCTION_DECL)
4660 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4662 if (name[0] == '@' && name[2] == '.')
4663 encoding = name[1];
4664 else
4665 encoding = 0;
4667 if (flag_function_sections || DECL_ONE_ONLY (decl))
4668 mep_unique_section (decl, 0);
4669 else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4671 if (encoding == 'f')
4672 return vftext_section;
4673 else
4674 return vtext_section;
4676 else if (encoding == 'f')
4677 return ftext_section;
4678 else
4679 return text_section;
4682 if (TREE_CODE (decl) == VAR_DECL)
4684 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4686 if (name[0] == '@' && name[2] == '.')
4687 switch (name[1])
4689 case 'b':
4690 return based_section;
4692 case 't':
4693 if (readonly)
4694 return srodata_section;
4695 if (DECL_INITIAL (decl))
4696 return sdata_section;
4697 return tinybss_section;
4699 case 'f':
4700 if (readonly)
4701 return frodata_section;
4702 return far_section;
4704 case 'i':
4705 case 'I':
4706 error_at (DECL_SOURCE_LOCATION (decl),
4707 "variable %D of type %<io%> must be uninitialized", decl);
4708 return data_section;
4710 case 'c':
4711 error_at (DECL_SOURCE_LOCATION (decl),
4712 "variable %D of type %<cb%> must be uninitialized", decl);
4713 return data_section;
4717 if (readonly)
4718 return readonly_data_section;
4720 return data_section;
/* Compute a unique, per-decl section name for DECL (used when decls get
   their own sections, e.g. -ffunction-sections or one-only/COMDAT decls).
   Picks a base section from the decl's kind and its '@x.' name-encoding
   prefix, then prepends either the plain or the .gnu.linkonce prefix
   depending on DECL_ONE_ONLY.  */
4723 static void
4724 mep_unique_section (tree decl, int reloc)
     /* Indexed by SEC below; column 0 = normal, column 1 = one-only.  */
4726 static const char *prefixes[][2] =
4728 { ".text.", ".gnu.linkonce.t." },
4729 { ".rodata.", ".gnu.linkonce.r." },
4730 { ".data.", ".gnu.linkonce.d." },
4731 { ".based.", ".gnu.linkonce.based." },
4732 { ".sdata.", ".gnu.linkonce.s." },
4733 { ".far.", ".gnu.linkonce.far." },
4734 { ".ftext.", ".gnu.linkonce.ft." },
4735 { ".frodata.", ".gnu.linkonce.frd." },
4736 { ".srodata.", ".gnu.linkonce.srd." },
4737 { ".vtext.", ".gnu.linkonce.v." },
4738 { ".vftext.", ".gnu.linkonce.vf." }
4740 int sec = 2; /* .data */
4741 int len;
4742 const char *name, *prefix;
4743 char *string;
4745 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
     /* Prefer the RTL symbol name, which carries the '@x.' encoding.  */
4746 if (DECL_RTL (decl))
4747 name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4749 if (TREE_CODE (decl) == FUNCTION_DECL)
4751 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4752 sec = 9; /* .vtext */
4753 else
4754 sec = 0; /* .text */
4756 else if (decl_readonly_section (decl, reloc))
4757 sec = 1; /* .rodata */
     /* '@x.' encoding prefix: refine the chosen section and strip it.  */
4759 if (name[0] == '@' && name[2] == '.')
4761 switch (name[1])
4763 case 'b':
4764 sec = 3; /* .based */
4765 break;
4766 case 't':
4767 if (sec == 1)
4768 sec = 8; /* .srodata */
4769 else
4770 sec = 4; /* .sdata */
4771 break;
4772 case 'f':
4773 if (sec == 0)
4774 sec = 6; /* .ftext */
4775 else if (sec == 9)
4776 sec = 10; /* .vftext */
4777 else if (sec == 1)
4778 sec = 7; /* .frodata */
4779 else
4780 sec = 5; /* .far. */
4781 break;
     /* Skip the three-character '@x.' prefix.  */
4783 name += 3;
4786 prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
4787 len = strlen (name) + strlen (prefix);
4788 string = (char *) alloca (len + 1);
4790 sprintf (string, "%s%s", prefix, name);
4792 DECL_SECTION_NAME (decl) = build_string (len, string);
4795 /* Given a decl, a section name, and whether the decl initializer
4796 has relocs, choose attributes for the section. */
     /* Target-specific section flag marking a VLIW-mode code section.  */
4798 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4800 static unsigned int
4801 mep_section_type_flags (tree decl, const char *name, int reloc)
4803 unsigned int flags = default_section_type_flags (decl, name, reloc);
     /* Functions carrying the "vliw" attribute go in VLIW sections.  */
4805 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4806 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4807 flags |= SECTION_MEP_VLIW;
4809 return flags;
4812 /* Switch to an arbitrary section NAME with attributes as specified
4813 by FLAGS. ALIGN specifies any known alignment requirements for
4814 the section; 0 if the default should be used.
4816 Differs from the standard ELF version only in support of VLIW mode. */
4818 static void
4819 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
     /* At most 5 flag chars + NUL fit; buffer is sized with headroom.  */
4821 char flagchars[8], *f = flagchars;
4822 const char *type;
4824 if (!(flags & SECTION_DEBUG))
4825 *f++ = 'a';
4826 if (flags & SECTION_WRITE)
4827 *f++ = 'w';
4828 if (flags & SECTION_CODE)
4829 *f++ = 'x';
4830 if (flags & SECTION_SMALL)
4831 *f++ = 's';
     /* MeP-specific: 'v' marks a VLIW section for the assembler.  */
4832 if (flags & SECTION_MEP_VLIW)
4833 *f++ = 'v';
4834 *f = '\0';
4836 if (flags & SECTION_BSS)
4837 type = "nobits";
4838 else
4839 type = "progbits";
4841 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4842 name, flagchars, type);
     /* Tell the assembler which instruction mode the section uses.  */
4844 if (flags & SECTION_CODE)
4845 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4846 asm_out_file);
/* Emit assembly for an aligned common/BSS object NAME of SIZE bytes with
   ALIGN bits alignment.  GLOBAL is nonzero for global symbols.  Handles
   the MeP '@x.' name encodings specially: io/cb variables with a fixed
   location become absolute symbols; based/tiny/far variables are placed
   in their dedicated BSS-like sections; everything else uses .comm.  */
4849 void
4850 mep_output_aligned_common (FILE *stream, tree decl, const char *name,
4851 int size, int align, int global)
4853 /* We intentionally don't use mep_section_tag() here. */
4854 if (name[0] == '@'
4855 && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
4856 && name[2] == '.')
4858 int location = -1;
4859 tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
4860 DECL_ATTRIBUTES (decl));
4861 if (attr
4862 && TREE_VALUE (attr)
4863 && TREE_VALUE (TREE_VALUE(attr)))
4864 location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
     /* No explicit address: emit nothing for this object.  */
4865 if (location == -1)
4866 return;
4867 if (global)
4869 fprintf (stream, "\t.globl\t");
4870 assemble_name (stream, name);
4871 fprintf (stream, "\n");
     /* Define the symbol as an absolute address.  */
4873 assemble_name (stream, name);
4874 fprintf (stream, " = %d\n", location);
4875 return;
4877 if (name[0] == '@' && name[2] == '.')
4879 const char *sec = 0;
4880 switch (name[1])
4882 case 'b':
4883 switch_to_section (based_section);
4884 sec = ".based";
4885 break;
4886 case 't':
4887 switch_to_section (tinybss_section);
4888 sec = ".sbss";
4889 break;
4890 case 'f':
4891 switch_to_section (farbss_section);
4892 sec = ".farbss";
4893 break;
4895 if (sec)
4897 const char *name2;
4898 int p2align = 0;
     /* Convert the bit alignment to a power-of-two byte alignment.  */
4900 while (align > BITS_PER_UNIT)
4902 align /= 2;
4903 p2align ++;
4905 name2 = TARGET_STRIP_NAME_ENCODING (name);
4906 if (global)
4907 fprintf (stream, "\t.globl\t%s\n", name2);
4908 fprintf (stream, "\t.p2align %d\n", p2align);
4909 fprintf (stream, "\t.type\t%s,@object\n", name2);
4910 fprintf (stream, "\t.size\t%s,%d\n", name2, size);
4911 fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
4912 return;
     /* Default: ordinary (possibly local) common symbol.  */
4916 if (!global)
4918 fprintf (stream, "\t.local\t");
4919 assemble_name (stream, name);
4920 fprintf (stream, "\n");
4922 fprintf (stream, "\t.comm\t");
4923 assemble_name (stream, name);
4924 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
4927 /* Trampolines. */
/* Initialize the trampoline at M_TRAMP for nested function FNDECL with
   static chain STATIC_CHAIN.  Rather than emitting instructions inline,
   MeP delegates the work to a runtime helper.  */
4929 static void
4930 mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
4932 rtx addr = XEXP (m_tramp, 0);
4933 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
     /* __mep_trampoline_helper (addr, fnaddr, static_chain) fills in
        the trampoline at run time.  */
4935 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4936 LCT_NORMAL, VOIDmode, 3,
4937 addr, Pmode,
4938 fnaddr, Pmode,
4939 static_chain, Pmode);
4942 /* Experimental Reorg. */
/* Return true if REG (or, when REG is NULL, any MEM) is mentioned
   anywhere inside the rtx IN.  If MODES_TOO is nonzero, a register
   reference only counts when its mode matches REG's mode.  Recurses
   through the rtx structure; for a SET with REG == NULL only the
   destination is scanned (the source is read-only).  */
4944 static bool
4945 mep_mentioned_p (rtx in,
4946 rtx reg, /* NULL for mem */
4947 int modes_too) /* if nonzero, modes must match also. */
4949 const char *fmt;
4950 int i;
4951 enum rtx_code code;
4953 if (in == 0)
4954 return false;
4955 if (reg && GET_CODE (reg) != REG)
4956 return false;
4958 if (GET_CODE (in) == LABEL_REF)
4959 return (reg == 0);
4961 code = GET_CODE (in);
4963 switch (code)
4965 case MEM:
     /* Searching for a register: look inside the address.
        Searching for a MEM (reg == NULL): found one.  */
4966 if (reg)
4967 return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
4968 return true;
4970 case REG:
4971 if (!reg)
4972 return false;
4973 if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
4974 return false;
4975 return (REGNO (in) == REGNO (reg));
     /* Leaf codes that can never mention a register or memory.  */
4977 case SCRATCH:
4978 case CC0:
4979 case PC:
4980 case CONST_INT:
4981 case CONST_DOUBLE:
4982 return false;
4984 default:
4985 break;
4988 /* Set's source should be read-only. */
4989 if (code == SET && !reg)
4990 return mep_mentioned_p (SET_DEST (in), reg, modes_too);
     /* Generic walk over all sub-expressions.  */
4992 fmt = GET_RTX_FORMAT (code);
4994 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4996 if (fmt[i] == 'E')
4998 register int j;
4999 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
5000 if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
5001 return true;
5003 else if (fmt[i] == 'e'
5004 && mep_mentioned_p (XEXP (in, i), reg, modes_too))
5005 return true;
5007 return false;
5010 #define EXPERIMENTAL_REGMOVE_REORG 1
5012 #if EXPERIMENTAL_REGMOVE_REORG
/* Return 1 if hard registers R1 and R2 belong to the same MeP register
   file (both general registers or both coprocessor registers), so a
   move between them could be elided by the regmove reorg pass.  */
5014 static int
5015 mep_compatible_reg_class (int r1, int r2)
5017 if (GR_REGNO_P (r1) && GR_REGNO_P (r2))
5018 return 1;
5019 if (CR_REGNO_P (r1) && CR_REGNO_P (r2))
5020 return 1;
5021 return 0;
/* Delete superfluous register-to-register moves.  For each
   (set r2 r1) where r1 dies, find the next SET that uses r2 and where
   r2 dies; substitute r1 there and, if the result is still recognized,
   delete the original move.  Iterates to a fixed point.  */
5024 static void
5025 mep_reorg_regmove (rtx insns)
5027 rtx insn, next, pat, follow, *where;
5028 int count = 0, done = 0, replace, before = 0;
     /* BEFORE counts insns only for the dump-file statistics.  */
5030 if (dump_file)
5031 for (insn = insns; insn; insn = NEXT_INSN (insn))
5032 if (GET_CODE (insn) == INSN)
5033 before++;
5035 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
5036 set that uses the r2 and r2 dies there. We replace r2 with r1
5037 and see if it's still a valid insn. If so, delete the first set.
5038 Copied from reorg.c. */
5040 while (!done)
5042 done = 1;
5043 for (insn = insns; insn; insn = next)
5045 next = NEXT_INSN (insn);
5046 if (GET_CODE (insn) != INSN)
5047 continue;
5048 pat = PATTERN (insn);
5050 replace = 0;
5052 if (GET_CODE (pat) == SET
5053 && GET_CODE (SET_SRC (pat)) == REG
5054 && GET_CODE (SET_DEST (pat)) == REG
5055 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
5056 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
5058 follow = next_nonnote_insn (insn);
5059 if (dump_file)
5060 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
     /* Skip forward over SETs that touch neither register.  */
5062 while (follow && GET_CODE (follow) == INSN
5063 && GET_CODE (PATTERN (follow)) == SET
5064 && !dead_or_set_p (follow, SET_SRC (pat))
5065 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
5066 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
5068 if (dump_file)
5069 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow))
5070 follow = next_nonnote_insn (follow);
     /* NOTE(review): FOLLOW may be NULL here; INSN_UID (NULL) in the
        dump would dereference it — confirm the loop can exit with
        NULL when dump_file is set.  */
5073 if (dump_file)
5074 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
5075 if (follow && GET_CODE (follow) == INSN
5076 && GET_CODE (PATTERN (follow)) == SET
5077 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
5079 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
5081 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
5083 replace = 1;
5084 where = & SET_SRC (PATTERN (follow));
5087 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
5089 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
5091 replace = 1;
5092 where = & PATTERN (follow);
5098 /* If so, follow is the corresponding insn */
5099 if (replace)
5101 if (dump_file)
5103 rtx x;
5105 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
5106 for (x = insn; x ;x = NEXT_INSN (x))
5108 print_rtl_single (dump_file, x);
5109 if (x == follow)
5110 break;
5111 fprintf (dump_file, "\n");
     /* Try the substitution; only commit if the insn still matches.  */
5115 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
5116 follow, where))
5118 count ++;
5119 next = delete_insn (insn);
5120 if (dump_file)
5122 fprintf (dump_file, "\n----- Success! new insn:\n\n");
5123 print_rtl_single (dump_file, follow);
     /* A deletion may expose new opportunities; rescan.  */
5125 done = 0;
5131 if (dump_file)
5133 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5134 fprintf (dump_file, "=====\n");
5137 #endif
5140 /* Figure out where to put LABEL, which is the label for a repeat loop.
5141 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5142 the loop ends just before LAST_INSN. If SHARED, insns other than the
5143 "repeat" might use LABEL to jump to the loop's continuation point.
5145 Return the last instruction in the adjusted loop. */
5147 static rtx
5148 mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
5149 bool shared)
5151 rtx next, prev;
5152 int count = 0, code, icode;
5154 if (dump_file)
5155 fprintf (dump_file, "considering end of repeat loop at insn %d\n",
5156 INSN_UID (last_insn));
5158 /* Set PREV to the last insn in the loop. */
5159 prev = last_insn;
5160 if (!including)
5161 prev = PREV_INSN (prev);
5163 /* Set NEXT to the next insn after the repeat label. */
5164 next = last_insn;
     /* Walk backwards looking for up to two insns that can safely form
        the repeat epilogue (the two slots after the repeat label).  */
5165 if (!shared)
5166 while (prev != 0)
5168 code = GET_CODE (prev);
5169 if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
5170 break;
5172 if (INSN_P (prev))
5174 if (GET_CODE (PATTERN (prev)) == SEQUENCE)
5175 prev = XVECEXP (PATTERN (prev), 0, 1);
5177 /* Other insns that should not be in the last two opcodes. */
5178 icode = recog_memoized (prev);
5179 if (icode < 0
5180 || icode == CODE_FOR_repeat
5181 || icode == CODE_FOR_erepeat
5182 || get_attr_may_trap (prev) == MAY_TRAP_YES)
5183 break;
5185 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5186 is the second instruction in a VLIW bundle. In that case,
5187 loop again: if the first instruction also satisfies the
5188 conditions above then we will reach here again and put
5189 both of them into the repeat epilogue. Otherwise both
5190 should remain outside. */
5191 if (GET_MODE (prev) != BImode)
5193 count++;
5194 next = prev;
5195 if (dump_file)
5196 print_rtl_single (dump_file, next);
5197 if (count == 2)
5198 break;
5201 prev = PREV_INSN (prev);
5204 /* See if we're adding the label immediately after the repeat insn.
5205 If so, we need to separate them with a nop. */
5206 prev = prev_real_insn (next);
5207 if (prev)
5208 switch (recog_memoized (prev))
5210 case CODE_FOR_repeat:
5211 case CODE_FOR_erepeat:
5212 if (dump_file)
5213 fprintf (dump_file, "Adding nop inside loop\n");
5214 emit_insn_before (gen_nop (), next);
5215 break;
5217 default:
5218 break;
5221 /* Insert the label. */
5222 emit_label_before (label, next);
5224 /* Insert the nops. */
5225 if (dump_file && count < 2)
5226 fprintf (dump_file, "Adding %d nop%s\n\n",
5227 2 - count, count == 1 ? "" : "s");
     /* Pad the epilogue to exactly two insns after the label.  */
5229 for (; count < 2; count++)
5230 if (including)
5231 last_insn = emit_insn_after (gen_nop (), last_insn);
5232 else
5233 emit_insn_before (gen_nop (), last_insn);
5235 return last_insn;
/* Expander helper for doloop_begin / doloop_end.  OPERANDS are the
   pattern operands; IS_END is nonzero when expanding doloop_end.
   Each begin/end pair shares an integer tag so the reorg pass can
   match them up later; a new tag is allocated whenever the begin/end
   alternation restarts.  */
5239 void
5240 mep_emit_doloop (rtx *operands, int is_end)
5242 rtx tag;
5244 if (cfun->machine->doloop_tags == 0
5245 || cfun->machine->doloop_tag_from_end == is_end)
5247 cfun->machine->doloop_tags++;
5248 cfun->machine->doloop_tag_from_end = is_end;
     /* Tags are zero-based; the current one is doloop_tags - 1.  */
5251 tag = GEN_INT (cfun->machine->doloop_tags - 1);
5252 if (is_end)
5253 emit_jump_insn (gen_doloop_end_internal (operands[0], operands[4], tag));
5254 else
5255 emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
5259 /* Code for converting doloop_begins and doloop_ends into valid
5260 MeP instructions. A doloop_begin is just a placeholder:
5262 $count = unspec ($count)
5264 where $count is initially the number of iterations - 1.
5265 doloop_end has the form:
5267 if ($count-- == 0) goto label
5269 The counter variable is private to the doloop insns, nothing else
5270 relies on its value.
5272 There are three cases, in decreasing order of preference:
5274 1. A loop has exactly one doloop_begin and one doloop_end.
5275 The doloop_end branches to the first instruction after
5276 the doloop_begin.
5278 In this case we can replace the doloop_begin with a repeat
5279 instruction and remove the doloop_end. I.e.:
5281 $count1 = unspec ($count1)
5282 label:
5284 insn1
5285 insn2
5286 if ($count2-- == 0) goto label
5288 becomes:
5290 repeat $count1,repeat_label
5291 label:
5293 repeat_label:
5294 insn1
5295 insn2
5296 # end repeat
5298 2. As for (1), except there are several doloop_ends. One of them
5299 (call it X) falls through to a label L. All the others fall
5300 through to branches to L.
5302 In this case, we remove X and replace the other doloop_ends
5303 with branches to the repeat label. For example:
5305 $count1 = unspec ($count1)
5306 start:
5308 if ($count2-- == 0) goto label
5309 end:
5311 if ($count3-- == 0) goto label
5312 goto end
5314 becomes:
5316 repeat $count1,repeat_label
5317 start:
5319 repeat_label:
5322 # end repeat
5323 end:
5325 goto repeat_label
5327 3. The fallback case. Replace doloop_begins with:
5329 $count = $count + 1
5331 Replace doloop_ends with the equivalent of:
5333 $count = $count - 1
5334 if ($count == 0) goto label
5336 Note that this might need a scratch register if $count
5337 is stored in memory. */
5339 /* A structure describing one doloop_begin. */
5340 struct mep_doloop_begin {
5341 /* The next doloop_begin with the same tag. */
5342 struct mep_doloop_begin *next;
5344 /* The instruction itself. */
5345 rtx insn;
5347 /* The initial counter value. This is known to be a general register. */
5348 rtx counter;
5351 /* A structure describing a doloop_end. */
5352 struct mep_doloop_end {
5353 /* The next doloop_end with the same loop tag. */
5354 struct mep_doloop_end *next;
5356 /* The instruction itself. */
5357 rtx insn;
5359 /* The first instruction after INSN when the branch isn't taken. */
5360 rtx fallthrough;
5362 /* The location of the counter value. Since doloop_end_internal is a
5363 jump instruction, it has to allow the counter to be stored anywhere
5364 (any non-fixed register or memory location). */
5365 rtx counter;
5367 /* The target label (the place where the insn branches when the counter
5368 isn't zero). */
5369 rtx label;
5371 /* A scratch register. Only available when COUNTER isn't stored
5372 in a general register. */
5373 rtx scratch;
5377 /* One do-while loop. */
5378 struct mep_doloop {
5379 /* All the doloop_begins for this loop (in no particular order). */
5380 struct mep_doloop_begin *begin;
5382 /* All the doloop_ends. When there is more than one, arrange things
5383 so that the first one is the most likely to be X in case (2) above. */
5384 struct mep_doloop_end *end;
5388 /* Return true if LOOP can be converted into repeat/repeat_end form
5389 (that is, if it matches cases (1) or (2) above). */
5391 static bool
5392 mep_repeat_loop_p (struct mep_doloop *loop)
5394 struct mep_doloop_end *end;
5395 rtx fallthrough;
5397 /* There must be exactly one doloop_begin and at least one doloop_end. */
5398 if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
5399 return false;
5401 /* The first doloop_end (X) must branch back to the insn after
5402 the doloop_begin. */
5403 if (prev_real_insn (loop->end->label) != loop->begin->insn)
5404 return false;
5406 /* All the other doloop_ends must branch to the same place as X.
5407 When the branch isn't taken, they must jump to the instruction
5408 after X. */
5409 fallthrough = loop->end->fallthrough;
5410 for (end = loop->end->next; end != 0; end = end->next)
5411 if (end->label != loop->end->label
5412 || !simplejump_p (end->fallthrough)
5413 || next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
5414 return false;
5416 return true;
5420 /* The main repeat reorg function. See comment above for details. */
5422 static void
5423 mep_reorg_repeat (rtx insns)
5425 rtx insn;
5426 struct mep_doloop *loops, *loop;
5427 struct mep_doloop_begin *begin;
5428 struct mep_doloop_end *end;
5430 /* Quick exit if we haven't created any loops. */
5431 if (cfun->machine->doloop_tags == 0)
5432 return;
5434 /* Create an array of mep_doloop structures. */
5435 loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
5436 memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);
5438 /* Search the function for do-while insns and group them by loop tag. */
5439 for (insn = insns; insn; insn = NEXT_INSN (insn))
5440 if (INSN_P (insn))
5441 switch (recog_memoized (insn))
5443 case CODE_FOR_doloop_begin_internal:
5444 insn_extract (insn);
     /* operand[2] is the loop tag assigned by mep_emit_doloop.  */
5445 loop = &loops[INTVAL (recog_data.operand[2])];
5447 begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
5448 begin->next = loop->begin;
5449 begin->insn = insn;
5450 begin->counter = recog_data.operand[0];
5452 loop->begin = begin;
5453 break;
5455 case CODE_FOR_doloop_end_internal:
5456 insn_extract (insn);
5457 loop = &loops[INTVAL (recog_data.operand[2])];
5459 end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
5460 end->insn = insn;
5461 end->fallthrough = next_real_insn (insn);
5462 end->counter = recog_data.operand[0];
5463 end->label = recog_data.operand[1];
5464 end->scratch = recog_data.operand[3];
5466 /* If this insn falls through to an unconditional jump,
5467 give it a lower priority than the others. */
5468 if (loop->end != 0 && simplejump_p (end->fallthrough))
5470 end->next = loop->end->next;
5471 loop->end->next = end;
5473 else
5475 end->next = loop->end;
5476 loop->end = end;
5478 break;
5481 /* Convert the insns for each loop in turn. */
5482 for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
5483 if (mep_repeat_loop_p (loop))
5485 /* Case (1) or (2). */
5486 rtx repeat_label, label_ref;
5488 /* Create a new label for the repeat insn. */
5489 repeat_label = gen_label_rtx ();
5491 /* Replace the doloop_begin with a repeat. */
5492 label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
5493 emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
5494 loop->begin->insn);
5495 delete_insn (loop->begin->insn);
5497 /* Insert the repeat label before the first doloop_end.
5498 Fill the gap with nops if there are other doloop_ends. */
5499 mep_insert_repeat_label_last (loop->end->insn, repeat_label,
5500 false, loop->end->next != 0);
5502 /* Emit a repeat_end (to improve the readability of the output). */
5503 emit_insn_before (gen_repeat_end (), loop->end->insn);
5505 /* Delete the first doloop_end. */
5506 delete_insn (loop->end->insn);
5508 /* Replace the others with branches to REPEAT_LABEL. */
5509 for (end = loop->end->next; end != 0; end = end->next)
5511 emit_jump_insn_before (gen_jump (repeat_label), end->insn);
5512 delete_insn (end->insn);
5513 delete_insn (end->fallthrough);
5516 else
5518 /* Case (3). First replace all the doloop_begins with increment
5519 instructions. */
5520 for (begin = loop->begin; begin != 0; begin = begin->next)
5522 emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
5523 begin->counter, const1_rtx),
5524 begin->insn);
5525 delete_insn (begin->insn);
5528 /* Replace all the doloop_ends with decrement-and-branch sequences. */
5529 for (end = loop->end; end != 0; end = end->next)
5531 rtx reg;
5533 start_sequence ();
5535 /* Load the counter value into a general register. */
5536 reg = end->counter;
     /* Registers above r15 (or memory) can't be used directly;
        bounce through the scratch register instead.  */
5537 if (!REG_P (reg) || REGNO (reg) > 15)
5539 reg = end->scratch;
5540 emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
5543 /* Decrement the counter. */
5544 emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
5545 constm1_rtx));
5547 /* Copy it back to its original location. */
5548 if (reg != end->counter)
5549 emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));
5551 /* Jump back to the start label. */
5552 insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
5553 end->label));
5554 JUMP_LABEL (insn) = end->label;
5555 LABEL_NUSES (end->label)++;
5557 /* Emit the whole sequence before the doloop_end. */
5558 insn = get_insns ();
5559 end_sequence ();
5560 emit_insn_before (insn, end->insn);
5562 /* Delete the doloop_end. */
5563 delete_insn (end->insn);
/* Return true if INSN is a conditional branch whose condition can be
   inverted (EQ/NE/LT/GE) and that is still recognized after inversion.
   The condition code is temporarily flipped in place, recog is tried,
   and the original code is restored before returning.  */
5569 static bool
5570 mep_invertable_branch_p (rtx insn)
5572 rtx cond, set;
5573 enum rtx_code old_code;
5574 int i;
5576 set = PATTERN (insn);
5577 if (GET_CODE (set) != SET)
5578 return false;
5579 if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
5580 return false;
5581 cond = XEXP (XEXP (set, 1), 0);
5582 old_code = GET_CODE (cond);
5583 switch (old_code)
5585 case EQ:
5586 PUT_CODE (cond, NE);
5587 break;
5588 case NE:
5589 PUT_CODE (cond, EQ);
5590 break;
5591 case LT:
5592 PUT_CODE (cond, GE);
5593 break;
5594 case GE:
5595 PUT_CODE (cond, LT);
5596 break;
5597 default:
     /* GT/LE and others aren't handled, so not invertable here.  */
5598 return false;
     /* Re-recognize with the inverted condition, then undo.  */
5600 INSN_CODE (insn) = -1;
5601 i = recog_memoized (insn);
5602 PUT_CODE (cond, old_code);
5603 INSN_CODE (insn) = -1;
5604 return i >= 0;
/* Invert the condition of branch INSN (EQ<->NE, LT<->GE) and retarget
   it to a fresh label emitted just after AFTER.  The caller must have
   checked mep_invertable_branch_p.  The old target label is deleted
   when INSN was its only user.  */
5607 static void
5608 mep_invert_branch (rtx insn, rtx after)
5610 rtx cond, set, label;
5611 int i;
5613 set = PATTERN (insn);
5615 gcc_assert (GET_CODE (set) == SET);
5616 gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);
5618 cond = XEXP (XEXP (set, 1), 0);
5619 switch (GET_CODE (cond))
5621 case EQ:
5622 PUT_CODE (cond, NE);
5623 break;
5624 case NE:
5625 PUT_CODE (cond, EQ);
5626 break;
5627 case LT:
5628 PUT_CODE (cond, GE);
5629 break;
5630 case GE:
5631 PUT_CODE (cond, LT);
5632 break;
5633 default:
5634 gcc_unreachable ();
5636 label = gen_label_rtx ();
5637 emit_label_after (label, after);
     /* Repoint whichever arm of the IF_THEN_ELSE is the LABEL_REF.  */
5638 for (i=1; i<=2; i++)
5639 if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
5641 rtx ref = XEXP (XEXP (set, 1), i);
5642 if (LABEL_NUSES (XEXP (ref, 0)) == 1)
5643 delete_insn (XEXP (ref, 0));
5644 XEXP (ref, 0) = label;
5645 LABEL_NUSES (label) ++;
5646 JUMP_LABEL (insn) = label;
     /* The modified branch must still be recognized.  */
5648 INSN_CODE (insn) = -1;
5649 i = recog_memoized (insn);
5650 gcc_assert (i >= 0);
/* Convert suitable backward-branch loops into erepeat form.  For each
   invertable (or unconditional) jump, scan backwards for its target
   label; if the span is free of calls, barriers, and unsafe labels,
   emit an erepeat after the loop top and an erepeat_end at the bottom,
   inverting the branch when it was conditional.  */
5653 static void
5654 mep_reorg_erepeat (rtx insns)
5656 rtx insn, prev, label_before, l, x;
5657 int count;
5659 for (insn = insns; insn; insn = NEXT_INSN (insn))
5660 if (JUMP_P (insn)
5661 && ! JUMP_TABLE_DATA_P (insn)
5662 && mep_invertable_branch_p (insn))
5664 if (dump_file)
5666 fprintf (dump_file, "\n------------------------------\n");
5667 fprintf (dump_file, "erepeat: considering this jump:\n");
5668 print_rtl_single (dump_file, insn);
     /* A conditional branch itself occupies one slot in the loop.  */
5670 count = simplejump_p (insn) ? 0 : 1;
5671 label_before = 0;
5672 for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
5674 if (GET_CODE (prev) == CALL_INSN
5675 || BARRIER_P (prev))
5676 break;
5678 if (prev == JUMP_LABEL (insn))
5680 rtx newlast;
5681 if (dump_file)
5682 fprintf (dump_file, "found loop top, %d insns\n", count);
5684 if (LABEL_NUSES (prev) == 1)
5685 /* We're the only user, always safe */ ;
5686 else if (LABEL_NUSES (prev) == 2)
5688 /* See if there's a barrier before this label. If
5689 so, we know nobody inside the loop uses it.
5690 But we must be careful to put the erepeat
5691 *after* the label. */
5692 rtx barrier;
5693 for (barrier = PREV_INSN (prev);
5694 barrier && GET_CODE (barrier) == NOTE;
5695 barrier = PREV_INSN (barrier))
5697 if (barrier && GET_CODE (barrier) != BARRIER)
5698 break;
5700 else
5702 /* We don't know who else, within or without our loop, uses this */
5703 if (dump_file)
5704 fprintf (dump_file, "... but there are multiple users, too risky.\n");
5705 break;
5708 /* Generate a label to be used by the erepat insn. */
5709 l = gen_label_rtx ();
5711 /* Insert the erepeat after INSN's target label. */
5712 x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
5713 LABEL_NUSES (l)++;
5714 emit_insn_after (x, prev);
5716 /* Insert the erepeat label. */
5717 newlast = (mep_insert_repeat_label_last
5718 (insn, l, !simplejump_p (insn), false));
5719 if (simplejump_p (insn))
     /* Unconditional loop branch: the erepeat replaces it.  */
5721 emit_insn_before (gen_erepeat_end (), insn);
5722 delete_insn (insn);
5724 else
     /* Conditional: invert it so it becomes the loop exit.  */
5726 mep_invert_branch (insn, newlast);
5727 emit_insn_after (gen_erepeat_end (), newlast);
5729 break;
5732 if (LABEL_P (prev))
5734 /* A label is OK if there is exactly one user, and we
5735 can find that user before the next label. */
5736 rtx user = 0;
5737 int safe = 0;
5738 if (LABEL_NUSES (prev) == 1)
5740 for (user = PREV_INSN (prev);
5741 user && (INSN_P (user) || GET_CODE (user) == NOTE);
5742 user = PREV_INSN (user))
5743 if (GET_CODE (user) == JUMP_INSN
5744 && JUMP_LABEL (user) == prev)
5746 safe = INSN_UID (user);
5747 break;
5750 if (!safe)
5751 break;
5752 if (dump_file)
5753 fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
5754 safe, INSN_UID (prev));
5757 if (INSN_P (prev))
5759 count ++;
     /* NOTE(review): label_before is recorded but appears unused
        in this visible window — confirm against the full file.  */
5760 if (count == 2)
5761 label_before = prev;
5765 if (dump_file)
5766 fprintf (dump_file, "\n==============================\n");
5769 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5770 always do this on its own. */
5772 static void
5773 mep_jmp_return_reorg (rtx insns)
5775 rtx insn, label, ret;
5776 int ret_code;
5778 for (insn = insns; insn; insn = NEXT_INSN (insn))
5779 if (simplejump_p (insn))
5781 /* Find the fist real insn the jump jumps to. */
5782 label = ret = JUMP_LABEL (insn);
     /* Skip notes, labels and USE insns after the target label.  */
5783 while (ret
5784 && (GET_CODE (ret) == NOTE
5785 || GET_CODE (ret) == CODE_LABEL
5786 || GET_CODE (PATTERN (ret)) == USE))
5787 ret = NEXT_INSN (ret);
5789 if (ret)
5791 /* Is it a return? */
5792 ret_code = recog_memoized (ret);
5793 if (ret_code == CODE_FOR_return_internal
5794 || ret_code == CODE_FOR_eh_return_internal)
5796 /* It is. Replace the jump with a return. */
5797 LABEL_NUSES (label) --;
5798 if (LABEL_NUSES (label) == 0)
5799 delete_insn (label);
     /* Turn the jump insn into a copy of the return pattern and
        force re-recognition.  */
5800 PATTERN (insn) = copy_rtx (PATTERN (ret));
5801 INSN_CODE (insn) = -1;
/* Combine adjacent "$r = $r + C1; $r = $r + C2" insns into a single
   add of C1+C2 when the sum still fits in a signed 16-bit range,
   deleting the second insn by unlinking it from the chain.  */
5808 static void
5809 mep_reorg_addcombine (rtx insns)
5811 rtx i, n;
5813 for (i = insns; i; i = NEXT_INSN (i))
5814 if (INSN_P (i)
5815 && INSN_CODE (i) == CODE_FOR_addsi3
5816 && GET_CODE (SET_DEST (PATTERN (i))) == REG
5817 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
5818 && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
5819 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
5821 n = NEXT_INSN (i);
5822 if (INSN_P (n)
5823 && INSN_CODE (n) == CODE_FOR_addsi3
5824 && GET_CODE (SET_DEST (PATTERN (n))) == REG
5825 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
5826 && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
5827 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
5829 int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
5830 int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
     /* NOTE(review): range check is asymmetric — accepts -32768..32766
        but rejects a legal sum of 32767; confirm whether that is
        intentionally conservative.  */
5831 if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
5832 && ic + nc < 32767
5833 && ic + nc > -32768)
5835 XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
     /* Unlink N from the insn chain (delete the second add).  */
5836 NEXT_INSN (i) = NEXT_INSN (n);
5837 if (NEXT_INSN (i))
5838 PREV_INSN (NEXT_INSN (i)) = i;
5844 /* If this insn adjusts the stack, return the adjustment, else return
5845 zero. */
/* I.e. match (set sp (plus sp (const_int N))) and return N.
   Note a true adjustment of 0 is indistinguishable from "no match".  */
5846 static int
5847 add_sp_insn_p (rtx insn)
5849 rtx pat;
5851 if (! single_set (insn))
5852 return 0;
5853 pat = PATTERN (insn);
5854 if (GET_CODE (SET_DEST (pat)) != REG)
5855 return 0;
5856 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5857 return 0;
5858 if (GET_CODE (SET_SRC (pat)) != PLUS)
5859 return 0;
5860 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5861 return 0;
5862 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5863 return 0;
5864 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5865 return 0;
5866 return INTVAL (XEXP (SET_SRC (pat), 1));
5869 /* Check for trivial functions that set up an unneeded stack
5870 frame. */
/* If the function starts with "$sp += N", nothing in between mentions
   $sp or makes a call, and a matching "$sp -= N" appears before the
   end, delete both stack adjustments.  */
5871 static void
5872 mep_reorg_noframe (rtx insns)
5874 rtx start_frame_insn;
5875 rtx end_frame_insn = 0;
5876 int sp_adjust, sp2;
5877 rtx sp;
5879 /* The first insn should be $sp = $sp + N */
5880 while (insns && ! INSN_P (insns))
5881 insns = NEXT_INSN (insns);
5882 if (!insns)
5883 return;
5885 sp_adjust = add_sp_insn_p (insns);
5886 if (sp_adjust == 0)
5887 return;
5889 start_frame_insn = insns;
5890 sp = SET_DEST (PATTERN (start_frame_insn));
5892 insns = next_real_insn (insns);
5894 while (insns)
5896 rtx next = next_real_insn (insns);
     /* Stop before the final insn so the epilogue jump/return
        isn't scanned.  */
5897 if (!next)
5898 break;
5900 sp2 = add_sp_insn_p (insns);
5901 if (sp2)
     /* A second sp adjustment after the epilogue one: give up.  */
5903 if (end_frame_insn)
5904 return;
5905 end_frame_insn = insns;
5906 if (sp2 != -sp_adjust)
5907 return;
     /* Any other use of $sp, or a call, means the frame is needed.  */
5909 else if (mep_mentioned_p (insns, sp, 0))
5910 return;
5911 else if (CALL_P (insns))
5912 return;
5914 insns = next;
5917 if (end_frame_insn)
5919 delete_insn (start_frame_insn);
5920 delete_insn (end_frame_insn);
/* The machine-dependent reorg pass entry point: runs the MeP-specific
   peephole/loop transformations in a fixed order after VLIW bundling.  */
5924 static void
5925 mep_reorg (void)
5927 rtx insns = get_insns ();
5929 /* We require accurate REG_DEAD notes. */
5930 compute_bb_for_insn ();
5931 df_note_add_problem ();
5932 df_analyze ();
5934 mep_reorg_addcombine (insns);
5935 #if EXPERIMENTAL_REGMOVE_REORG
5936 /* VLIW packing has been done already, so we can't just delete things. */
5937 if (!mep_vliw_function_p (cfun->decl))
5938 mep_reorg_regmove (insns);
5939 #endif
5940 mep_jmp_return_reorg (insns);
5941 mep_bundle_insns (insns);
5942 mep_reorg_repeat (insns);
     /* erepeat uses the RPB/RPE/RPC registers, so it is unsafe in
        profiled code and in interrupt handlers that don't save RPB.  */
5943 if (optimize
5944 && !profile_flag
5945 && !profile_arc_flag
5946 && TARGET_OPT_REPEAT
5947 && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
5948 mep_reorg_erepeat (insns);
5950 /* This may delete *insns so make sure it's last. */
5951 mep_reorg_noframe (insns);
5953 df_finish_pass (false);
5958 /*----------------------------------------------------------------------*/
5959 /* Builtins */
5960 /*----------------------------------------------------------------------*/
5962 /* Element X gives the index into cgen_insns[] of the most general
5963 implementation of intrinsic X. Unimplemented intrinsics are
5964 mapped to -1. */
5965 int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];
5967 /* Element X gives the index of another instruction that is mapped to
5968 the same intrinsic as cgen_insns[X]. It is -1 when there is no other
5969 instruction.
5971 Things are set up so that mep_intrinsic_chain[X] < X. */
5972 static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];
5974 /* The bitmask for the current ISA. The ISA masks are declared
5975 in mep-intrin.h. */
5976 unsigned int mep_selected_isa;
/* Maps a -mconfig= name to its ISA bitmask.  */
5978 struct mep_config {
5979 const char *config_name;
5980 unsigned int isa;
/* Table of known configurations; terminated by a null entry.  */
5983 static struct mep_config mep_configs[] = {
5984 #ifdef COPROC_SELECTION_TABLE
5985 COPROC_SELECTION_TABLE,
5986 #endif
5987 { 0, 0 }
5990 /* Initialize the global intrinsics variables above. */
5992 static void
5993 mep_init_intrinsics (void)
5995 size_t i;
5997 /* Set MEP_SELECTED_ISA to the ISA flag for this configuration. */
5998 mep_selected_isa = mep_configs[0].isa;
5999 if (mep_config_string != 0)
6000 for (i = 0; mep_configs[i].config_name; i++)
6001 if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
6003 mep_selected_isa = mep_configs[i].isa;
6004 break;
6007 /* Assume all intrinsics are unavailable. */
6008 for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
6009 mep_intrinsic_insn[i] = -1;
6011 /* Build up the global intrinsic tables. */
6012 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
6013 if ((cgen_insns[i].isas & mep_selected_isa) != 0)
     /* Chain earlier implementations of the same intrinsic so
        mep_intrinsic_chain[X] < X holds.  */
6015 mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
6016 mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
6018 /* See whether we can directly move values between one coprocessor
6019 register and another. */
6020 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6021 if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
6022 mep_have_copro_copro_moves_p = true;
6024 /* See whether we can directly move values between core and
6025 coprocessor registers. */
6026 mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
6027 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2))
     /* NOTE(review): the next line unconditionally overwrites the value
        just computed above, forcing core<->copro moves on.  Looks like a
        deliberate override (or leftover debugging) — confirm intent.  */
6029 mep_have_core_copro_moves_p = 1;
6032 /* Declare all available intrinsic functions. Called once only. */
/* Cached tree type nodes for the coprocessor builtin argument and
   return types.  Built once by mep_init_builtins.  */
6034 static tree cp_data_bus_int_type_node;
6035 static tree opaque_vector_type_node;
6036 static tree v8qi_type_node;
6037 static tree v4hi_type_node;
6038 static tree v2si_type_node;
6039 static tree v8uqi_type_node;
6040 static tree v4uhi_type_node;
6041 static tree v2usi_type_node;
6043 static tree
6044 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
6046 switch (cr)
6048 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
6049 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
6050 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
6051 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
6052 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
6053 case cgen_regnum_operand_type_CHAR: return char_type_node;
6054 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
6055 case cgen_regnum_operand_type_SI: return intSI_type_node;
6056 case cgen_regnum_operand_type_DI: return intDI_type_node;
6057 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
6058 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
6059 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
6060 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
6061 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
6062 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
6063 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
6064 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
6065 default:
6066 return void_type_node;
/* TARGET_INIT_BUILTINS implementation: create the coprocessor builtin
   types (cp_data_bus_int, cp_vector, cp_v8qi, ...), push their
   TYPE_DECLs into the language scope, and register one builtin
   function per unique UNSPEC-form intrinsic.  Called once only.  */
6070 static void
6071 mep_init_builtins (void)
6073 size_t i;
/* The coprocessor data bus is 64 bits wide with 64-bit CR registers,
   32 bits otherwise.  */
6075 if (TARGET_64BIT_CR_REGS)
6076 cp_data_bus_int_type_node = long_long_integer_type_node;
6077 else
6078 cp_data_bus_int_type_node = long_integer_type_node;
6080 opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
6081 v8qi_type_node = build_vector_type (intQI_type_node, 8);
6082 v4hi_type_node = build_vector_type (intHI_type_node, 4);
6083 v2si_type_node = build_vector_type (intSI_type_node, 2);
6084 v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
6085 v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
6086 v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);
/* Make the type names visible to user code.  */
6088 (*lang_hooks.decls.pushdecl)
6089 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_data_bus_int"),
6090 cp_data_bus_int_type_node));
6092 (*lang_hooks.decls.pushdecl)
6093 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_vector"),
6094 opaque_vector_type_node));
6096 (*lang_hooks.decls.pushdecl)
6097 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8qi"),
6098 v8qi_type_node));
6099 (*lang_hooks.decls.pushdecl)
6100 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4hi"),
6101 v4hi_type_node));
6102 (*lang_hooks.decls.pushdecl)
6103 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2si"),
6104 v2si_type_node));
6106 (*lang_hooks.decls.pushdecl)
6107 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8uqi"),
6108 v8uqi_type_node));
6109 (*lang_hooks.decls.pushdecl)
6110 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4uhi"),
6111 v4uhi_type_node));
6112 (*lang_hooks.decls.pushdecl)
6113 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2usi"),
6114 v2usi_type_node));
6116 /* Intrinsics like mep_cadd3 are implemented with two groups of
6117 instructions, one which uses UNSPECs and one which uses a specific
6118 rtl code such as PLUS. Instructions in the latter group belong
6119 to GROUP_KNOWN_CODE.
6121 In such cases, the intrinsic will have two entries in the global
6122 tables above. The unspec form is accessed using builtin functions
6123 while the specific form is accessed using the mep_* enum in
6124 mep-intrin.h.
6126 The idea is that __cop arithmetic and builtin functions have
6127 different optimization requirements. If mep_cadd3() appears in
6128 the source code, the user will surely expect gcc to use cadd3
6129 rather than a work-alike such as add3. However, if the user
6130 just writes "a + b", where a or b are __cop variables, it is
6131 reasonable for gcc to choose a core instruction rather than
6132 cadd3 if it believes that is more optimal. */
6133 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
6134 if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
6135 && mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
6137 tree ret_type = void_type_node;
6138 tree bi_type;
/* Skip duplicate entries for the same intrinsic; register each
   builtin only once.  */
6140 if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
6141 continue;
6143 if (cgen_insns[i].cret_p)
6144 ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);
/* A null argument list: argument checking is done in
   mep_expand_builtin rather than through the type.  */
6146 bi_type = build_function_type (ret_type, 0);
6147 add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
6148 bi_type,
6149 cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
6153 /* Report the unavailability of the given intrinsic, choosing the
   diagnostic according to why it is unavailable.  Each intrinsic is
   reported at most once per compilation.  */
6155 #if 1
6156 static void
6157 mep_intrinsic_unavailable (int intrinsic)
/* Remember which intrinsics we have already complained about.  */
6159 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6161 if (already_reported_p[intrinsic])
6162 return;
/* No implementation at all for this configuration, versus an
   implementation that exists but not for the current (VLIW or
   non-VLIW) mode.  */
6164 if (mep_intrinsic_insn[intrinsic] < 0)
6165 error ("coprocessor intrinsic %qs is not available in this configuration",
6166 cgen_intrinsics[intrinsic]);
6167 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6168 error ("%qs is not available in VLIW functions",
6169 cgen_intrinsics[intrinsic]);
6170 else
6171 error ("%qs is not available in non-VLIW functions",
6172 cgen_intrinsics[intrinsic]);
6174 already_reported_p[intrinsic] = 1;
6176 #endif
6179 /* See if any implementation of INTRINSIC is available to the
6180 current function. If so, store the most general implementation
6181 in *INSN_PTR and return true. Return false otherwise. */
6183 static bool
6184 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6186 int i;
6188 i = mep_intrinsic_insn[intrinsic];
6189 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6190 i = mep_intrinsic_chain[i];
6192 if (i >= 0)
6194 *insn_ptr = &cgen_insns[i];
6195 return true;
6197 return false;
6201 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6202 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6203 try using a work-alike instead. In this case, the returned insn
6204 may have three operands rather than two. */
6206 static bool
6207 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6209 size_t i;
6211 if (intrinsic == mep_cmov)
6213 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6214 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6215 return true;
6216 return false;
6218 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6222 /* If ARG is a register operand that is the same size as MODE, convert it
6223 to MODE using a subreg. Otherwise return ARG as-is. */
6225 static rtx
6226 mep_convert_arg (enum machine_mode mode, rtx arg)
6228 if (GET_MODE (arg) != mode
6229 && register_operand (arg, VOIDmode)
6230 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6231 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6232 return arg;
6236 /* Apply regnum conversions to ARG using the description given by REGNUM.
6237 Return the new argument on success and null on failure. */
6239 static rtx
6240 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6242 if (regnum->count == 0)
6243 return arg;
6245 if (GET_CODE (arg) != CONST_INT
6246 || INTVAL (arg) < 0
6247 || INTVAL (arg) >= regnum->count)
6248 return 0;
6250 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6254 /* Try to make intrinsic argument ARG match the given operand.
6255 UNSIGNED_P is true if the argument has an unsigned type. */
/* Try to make intrinsic argument ARG match OPERAND.  UNSIGNED_P is
   true if the argument has an unsigned type.  Returns the (possibly
   converted) argument on success, or 0 if no legal form exists.  */
6257 static rtx
6258 mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
6259 int unsigned_p)
6261 if (GET_CODE (arg) == CONST_INT)
6263 /* CONST_INTs can only be bound to integer operands. */
6264 if (GET_MODE_CLASS (operand->mode) != MODE_INT)
6265 return 0;
6267 else if (GET_CODE (arg) == CONST_DOUBLE)
6268 /* These hold vector constants. */;
6269 else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
6271 /* If the argument is a different size from what's expected, we must
6272 have a value in the right mode class in order to convert it. */
6273 if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
6274 return 0;
6276 /* If the operand is an rvalue, promote or demote it to match the
6277 operand's size. This might not need extra instructions when
6278 ARG is a register value. */
6279 if (operand->constraint[0] != '=')
6280 arg = convert_to_mode (operand->mode, arg, unsigned_p);
6283 /* If the operand is an lvalue, bind the operand to a new register.
6284 The caller will copy this value into ARG after the main
6285 instruction. By doing this always, we produce slightly more
6286 optimal code. */
6287 /* But not for control registers. */
6288 if (operand->constraint[0] == '='
6289 && (! REG_P (arg)
6290 || ! (CONTROL_REGNO_P (REGNO (arg))
6291 || CCR_REGNO_P (REGNO (arg))
6292 || CR_REGNO_P (REGNO (arg)))
6294 return gen_reg_rtx (operand->mode);
6296 /* Try simple mode punning. */
6297 arg = mep_convert_arg (operand->mode, arg);
6298 if (operand->predicate (arg, operand->mode))
6299 return arg;
6301 /* See if forcing the argument into a register will make it match. */
6302 if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
6303 arg = force_reg (operand->mode, arg);
6304 else
6305 arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
6306 if (operand->predicate (arg, operand->mode))
6307 return arg;
6309 return 0;
6313 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6314 function FNNAME. OPERAND describes the operand to which ARGNUM
6315 is mapped. */
/* Report that ARG cannot be passed to argument ARGNUM of intrinsic
   function FNNAME.  OPERAND describes the operand to which ARGNUM is
   mapped; when the operand uses a known immediate predicate, produce
   a range- or alignment-specific diagnostic instead of the generic
   "incompatible type" one.  */
6317 static void
6318 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6319 int argnum, tree fnname)
6321 size_t i;
6323 if (GET_CODE (arg) == CONST_INT)
6324 for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6325 if (operand->predicate == cgen_immediate_predicates[i].predicate)
6327 const struct cgen_immediate_predicate *predicate;
6328 HOST_WIDE_INT argval;
6330 predicate = &cgen_immediate_predicates[i];
6331 argval = INTVAL (arg);
/* Value out of range: quote the allowed range.  In range but still
   rejected: the predicate must have failed its alignment test.  */
6332 if (argval < predicate->lower || argval >= predicate->upper)
6333 error ("argument %d of %qE must be in the range %d...%d",
6334 argnum, fnname, predicate->lower, predicate->upper - 1);
6335 else
6336 error ("argument %d of %qE must be a multiple of %d",
6337 argnum, fnname, predicate->align);
6338 return;
6341 error ("incompatible type for argument %d of %qE", argnum, fnname);
/* TARGET_EXPAND_BUILTIN implementation: expand a call EXP to a MeP
   coprocessor builtin into RTL.  Looks up the intrinsic's insn,
   checks the argument count, legitimizes each argument against the
   insn's operands, emits the instruction, and copies any lvalue
   operands back to their final locations.  Returns TARGET, or
   error_mark_node after diagnosing a bad call.  */
6344 static rtx
6345 mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6346 rtx subtarget ATTRIBUTE_UNUSED,
6347 enum machine_mode mode ATTRIBUTE_UNUSED,
6348 int ignore ATTRIBUTE_UNUSED)
6350 rtx pat, op[10], arg[10];
6351 unsigned int a;
6352 int opindex, unsigned_p[10];
6353 tree fndecl, args;
6354 unsigned int n_args;
6355 tree fnname;
6356 const struct cgen_insn *cgen_insn;
6357 const struct insn_data *idata;
/* 1 when operand 0 is a coprocessor return value rather than a
   user-supplied argument.  */
6358 int first_arg = 0;
/* NOTE(review): declared int but assigned tree values below, and
   never read afterwards.  Should be declared tree, or removed.  */
6359 int return_type = void_type_node;
6360 int builtin_n_args;
6362 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6363 fnname = DECL_NAME (fndecl);
6365 /* Find out which instruction we should emit. Note that some coprocessor
6366 intrinsics may only be available in VLIW mode, or only in normal mode. */
6367 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6369 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
/* NOTE(review): error_mark_node is a tree, but this function returns
   rtx; confirm callers treat this purely as a non-null error
   sentinel.  */
6370 return error_mark_node;
6372 idata = &insn_data[cgen_insn->icode];
6374 builtin_n_args = cgen_insn->num_args;
6376 if (cgen_insn->cret_p)
/* cret_p > 1 means the "return value" is also passed as an
   argument, so it counts both ways; net effect below is one fewer
   user-visible argument when cret_p == 1.  */
6378 if (cgen_insn->cret_p > 1)
6379 builtin_n_args ++;
6380 first_arg = 1;
6381 return_type = mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
6382 builtin_n_args --;
6385 /* Evaluate each argument. */
6386 n_args = call_expr_nargs (exp);
6388 if (n_args < builtin_n_args)
6390 error ("too few arguments to %qE", fnname);
6391 return error_mark_node;
6393 if (n_args > builtin_n_args)
6395 error ("too many arguments to %qE", fnname);
6396 return error_mark_node;
6399 for (a = first_arg; a < builtin_n_args+first_arg; a++)
6401 tree value;
6403 args = CALL_EXPR_ARG (exp, a-first_arg);
6405 value = args;
6407 #if 0
6408 if (cgen_insn->regnums[a].reference_p)
6410 if (TREE_CODE (value) != ADDR_EXPR)
6412 debug_tree(value);
6413 error ("argument %d of %qE must be an address", a+1, fnname);
6414 return error_mark_node;
6416 value = TREE_OPERAND (value, 0);
6418 #endif
6420 /* If the argument has been promoted to int, get the unpromoted
6421 value. This is necessary when sub-int memory values are bound
6422 to reference parameters. */
6423 if (TREE_CODE (value) == NOP_EXPR
6424 && TREE_TYPE (value) == integer_type_node
6425 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6426 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6427 < TYPE_PRECISION (TREE_TYPE (value))))
6428 value = TREE_OPERAND (value, 0);
6430 /* If the argument has been promoted to double, get the unpromoted
6431 SFmode value. This is necessary for FMAX support, for example. */
6432 if (TREE_CODE (value) == NOP_EXPR
6433 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6434 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6435 && TYPE_MODE (TREE_TYPE (value)) == DFmode
6436 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6437 value = TREE_OPERAND (value, 0);
6439 unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6440 arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
/* Literal register numbers become hard REGs; out-of-range values
   become 0 and are diagnosed below.  */
6441 arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6442 if (cgen_insn->regnums[a].reference_p)
6444 tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6445 enum machine_mode pointed_mode = TYPE_MODE (pointed_to);
6447 arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
6449 if (arg[a] == 0)
6451 error ("argument %d of %qE must be in the range %d...%d",
6452 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
6453 return error_mark_node;
/* Bind the return-value operand(s) to TARGET when it already has the
   right mode, otherwise to a fresh pseudo.  */
6457 for (a=0; a<first_arg; a++)
6459 if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6460 arg[a] = target;
6461 else
6462 arg[a] = gen_reg_rtx (idata->operand[0].mode);
6465 /* Convert the arguments into a form suitable for the intrinsic.
6466 Report an error if this isn't possible. */
6467 for (opindex = 0; opindex < idata->n_operands; opindex++)
6469 a = cgen_insn->op_mapping[opindex];
6470 op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6471 arg[a], unsigned_p[a]);
6472 if (op[opindex] == 0)
6474 mep_incompatible_arg (&idata->operand[opindex],
6475 arg[a], a + 1 - first_arg, fnname);
6476 return error_mark_node;
6480 /* Emit the instruction. */
6481 pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6482 op[5], op[6], op[7], op[8], op[9]);
/* Conditional-branch intrinsics must be emitted as jumps so the CFG
   stays correct.  */
6484 if (GET_CODE (pat) == SET
6485 && GET_CODE (SET_DEST (pat)) == PC
6486 && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6487 emit_jump_insn (pat)
6488 else
6489 emit_insn (pat);
6491 /* Copy lvalues back to their final locations. */
6492 for (opindex = 0; opindex < idata->n_operands; opindex++)
6493 if (idata->operand[opindex].constraint[0] == '=')
6495 a = cgen_insn->op_mapping[opindex];
6496 if (a >= first_arg)
6498 if (GET_MODE_CLASS (GET_MODE (arg[a]))
6499 != GET_MODE_CLASS (GET_MODE (op[opindex])))
6500 emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6501 op[opindex]));
6502 else
6504 /* First convert the operand to the right mode, then copy it
6505 into the destination. Doing the conversion as a separate
6506 step (rather than using convert_move) means that we can
6507 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6508 refer to the same register. */
6509 op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6510 op[opindex], unsigned_p[a]);
6511 if (!rtx_equal_p (arg[a], op[opindex]))
6512 emit_move_insn (arg[a], op[opindex]);
6517 if (first_arg > 0 && target && target != op[0])
6519 emit_move_insn (target, op[0]);
6522 return target;
/* Decline all generic vector modes; MeP vector operations are only
   reached through the coprocessor builtins.  (NOTE(review): assumed
   to implement the TARGET_VECTOR_MODE_SUPPORTED_P hook — confirm at
   the hook-table definition.)  */
6525 static bool
6526 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
6528 return false;
6531 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6532 a global register. */
/* for_each_rtx callback: returns nonzero (stopping the walk) when
   *LOC is or may use a global hard register.  */
6534 static int
6535 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
6537 int regno;
6538 rtx x = *loc;
6540 if (! x)
6541 return 0;
6543 switch (GET_CODE (x))
6545 case SUBREG:
6546 if (REG_P (SUBREG_REG (x)))
6548 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6549 && global_regs[subreg_regno (x)])
6550 return 1;
6551 return 0;
6553 break;
6555 case REG:
6556 regno = REGNO (x);
6557 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6558 return 1;
6559 return 0;
/* Codes that can never mention a register: prune the walk.  */
6561 case SCRATCH:
6562 case PC:
6563 case CC0:
6564 case CONST_INT:
6565 case CONST_DOUBLE:
6566 case CONST:
6567 case LABEL_REF:
6568 return 0;
6570 case CALL:
6571 /* A non-constant call might use a global register. */
6572 return 1;
6574 default:
6575 break;
6578 return 0;
6581 /* Returns nonzero if X mentions a global register. */
/* Returns nonzero if X (an insn or an rtx expression) mentions a
   global register.  Non-const/pure calls are conservatively assumed
   to use one.  */
6583 static int
6584 global_reg_mentioned_p (rtx x)
6586 if (INSN_P (x))
6588 if (CALL_P (x))
6590 if (! RTL_CONST_OR_PURE_CALL_P (x))
6591 return 1;
/* For const/pure calls, only the recorded function usage can
   mention globals.  */
6592 x = CALL_INSN_FUNCTION_USAGE (x);
6593 if (x == 0)
6594 return 0;
6596 else
6597 x = PATTERN (x);
6600 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6602 /* Scheduling hooks for VLIW mode.
6604 Conceptually this is very simple: we have a two-pack architecture
6605 that takes one core insn and one coprocessor insn to make up either
6606 a 32- or 64-bit instruction word (depending on the option bit set in
6607 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6608 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6609 and one 48-bit cop insn or two 32-bit core/cop insns.
6611 In practice, instruction selection will be a bear. Consider in
6612 VL64 mode the following insns
6614 add $1, 1
6615 cmov $cr0, $0
6617 these cannot pack, since the add is a 16-bit core insn and cmov
6618 is a 32-bit cop insn. However,
6620 add3 $1, $1, 1
6621 cmov $cr0, $0
6623 packs just fine. For good VLIW code generation in VL64 mode, we
6624 will have to have 32-bit alternatives for many of the common core
6625 insns. Not implemented. */
/* Scheduler cost hook: adjust the COST of the dependence LINK between
   INSN and DEP_INSN.  (NOTE(review): assumed to implement
   TARGET_SCHED_ADJUST_COST — confirm at the hook-table definition.)  */
6627 static int
6628 mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
6630 int cost_specified;
/* A nonzero REG_NOTE_KIND means the dependence is anti or output,
   not a true data dependence.  */
6632 if (REG_NOTE_KIND (link) != 0)
6634 /* See whether INSN and DEP_INSN are intrinsics that set the same
6635 hard register. If so, it is more important to free up DEP_INSN
6636 than it is to free up INSN.
6638 Note that intrinsics like mep_mulr are handled differently from
6639 the equivalent mep.md patterns. In mep.md, if we don't care
6640 about the value of $lo and $hi, the pattern will just clobber
6641 the registers, not set them. Since clobbers don't count as
6642 output dependencies, it is often possible to reorder two mulrs,
6643 even after reload.
6645 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6646 so any pair of mep_mulr()s will be inter-dependent. We should
6647 therefore give the first mep_mulr() a higher priority. */
6648 if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
6649 && global_reg_mentioned_p (PATTERN (insn))
6650 && global_reg_mentioned_p (PATTERN (dep_insn)))
6651 return 1;
6653 /* If the dependence is an anti or output dependence, assume it
6654 has no cost. */
6655 return 0;
6658 /* If we can't recognize the insns, we can't really do anything. */
6659 if (recog_memoized (dep_insn) < 0)
6660 return cost;
6662 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6663 attribute instead. */
6664 if (!TARGET_H1)
6666 cost_specified = get_attr_latency (dep_insn);
6667 if (cost_specified != 0)
6668 return cost_specified;
6671 return cost;
6674 /* ??? We don't properly compute the length of a load/store insn,
6675 taking into account the addressing mode. */
6677 static int
6678 mep_issue_rate (void)
6680 return TARGET_IVC2 ? 3 : 2;
6683 /* Return true if function DECL was declared with the vliw attribute. */
6685 bool
6686 mep_vliw_function_p (tree decl)
6688 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6691 static rtx
6692 mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6694 int i;
6696 for (i = nready - 1; i >= 0; --i)
6698 rtx insn = ready[i];
6699 if (recog_memoized (insn) >= 0
6700 && get_attr_slot (insn) == slot
6701 && get_attr_length (insn) == length)
6702 return insn;
6705 return NULL_RTX;
6708 static void
6709 mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6711 int i;
6713 for (i = 0; i < nready; ++i)
6714 if (ready[i] == insn)
6716 for (; i < nready - 1; ++i)
6717 ready[i] = ready[i + 1];
6718 ready[i] = insn;
6719 return;
6722 gcc_unreachable ();
/* Dump a one-line description of INSN (insn code, uid, pattern name
   and slot assignment) to DUMP, for scheduler debugging output.  */
6725 static void
6726 mep_print_sched_insn (FILE *dump, rtx insn)
6728 const char *slots = "none";
6729 const char *name = NULL;
6730 int code;
6731 char buf[30];
6733 if (GET_CODE (PATTERN (insn)) == SET
6734 || GET_CODE (PATTERN (insn)) == PARALLEL)
6736 switch (get_attr_slots (insn))
6738 case SLOTS_CORE: slots = "core"; break;
6739 case SLOTS_C3: slots = "c3"; break;
6740 case SLOTS_P0: slots = "p0"; break;
6741 case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6742 case SLOTS_P0_P1: slots = "p0,p1"; break;
6743 case SLOTS_P0S: slots = "p0s"; break;
6744 case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6745 case SLOTS_P1: slots = "p1"; break;
6746 default:
/* Unknown slot attribute: print the raw enum value.  */
6747 sprintf(buf, "%d", get_attr_slots (insn));
6748 slots = buf;
6749 break;
6752 if (GET_CODE (PATTERN (insn)) == USE)
6753 slots = "use";
6755 code = INSN_CODE (insn);
6756 if (code >= 0)
6757 name = get_insn_name (code);
6758 if (!name)
6759 name = "{unknown}";
6761 fprintf (dump,
6762 "insn %4d %4d %8s %s\n",
6763 code,
6764 INSN_UID (insn),
6765 name,
6766 slots);
/* Scheduler reorder hook: in VLIW functions, look for a core/cop insn
   pair whose lengths pack into one instruction word and move them to
   the issue end of the ready list.  Returns the number of insns the
   scheduler may issue this cycle.  */
6769 static int
6770 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6771 int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
6772 int *pnready, int clock ATTRIBUTE_UNUSED)
6774 int nready = *pnready;
6775 rtx core_insn, cop_insn;
6776 int i;
6778 if (dump && sched_verbose > 1)
6780 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6781 for (i=0; i<nready; i++)
6782 mep_print_sched_insn (dump, ready[i]);
6783 fprintf (dump, "\n");
/* Pairing only applies to VLIW functions, and needs at least two
   candidates.  */
6786 if (!mep_vliw_function_p (cfun->decl))
6787 return 1;
6788 if (nready < 2)
6789 return 1;
6791 /* IVC2 uses a DFA to determine what's ready and what's not. */
6792 if (TARGET_IVC2)
6793 return nready;
6795 /* We can issue either a core or coprocessor instruction.
6796 Look for a matched pair of insns to reorder. If we don't
6797 find any, don't second-guess the scheduler's priorities. */
/* 16-bit core + 16/48-bit cop, or (VL64 only) 32-bit core + 32-bit
   cop.  */
6799 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6800 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6801 TARGET_OPT_VL64 ? 6 : 2)))
6803 else if (TARGET_OPT_VL64
6804 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6805 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6807 else
6808 /* We didn't find a pair. Issue the single insn at the head
6809 of the ready list. */
6810 return 1;
6812 /* Reorder the two insns first. */
6813 mep_move_ready_insn (ready, nready, core_insn);
6814 mep_move_ready_insn (ready, nready - 1, cop_insn);
6815 return 2;
6818 /* A for_each_rtx callback. Return true if *X is a register that is
6819 set by insn PREV. */
6821 static int
6822 mep_store_find_set (rtx *x, void *prev)
6824 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6827 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6828 not the containing insn. */
/* Like mep_store_data_bypass_p, but takes a pattern PAT as the second
   argument, not the containing insn.  Returns true if PAT is a store
   whose address has no true dependence on PREV.  */
6830 static bool
6831 mep_store_data_bypass_1 (rtx prev, rtx pat)
6833 /* Cope with intrinsics like swcpa. */
6834 if (GET_CODE (pat) == PARALLEL)
6836 int i;
/* Any store inside the PARALLEL qualifying is enough.  */
6838 for (i = 0; i < XVECLEN (pat, 0); i++)
6839 if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6840 return true;
6842 return false;
6845 /* Check for some sort of store. */
6846 if (GET_CODE (pat) != SET
6847 || GET_CODE (SET_DEST (pat)) != MEM)
6848 return false;
6850 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6851 The first operand to the unspec is the store data and the other operands
6852 are used to calculate the address. */
6853 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6855 rtx src;
6856 int i;
6858 src = SET_SRC (pat);
/* Start at 1: operand 0 is the store data, which PREV may set.  */
6859 for (i = 1; i < XVECLEN (src, 0); i++)
6860 if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6861 return false;
6863 return true;
6866 /* Otherwise just check that PREV doesn't modify any register mentioned
6867 in the memory destination. */
6868 return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6871 /* Return true if INSN is a store instruction and if the store address
6872 has no true dependence on PREV. */
6874 bool
6875 mep_store_data_bypass_p (rtx prev, rtx insn)
6877 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6880 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6881 is a register other than LO or HI and if PREV sets *X. */
6883 static int
6884 mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6886 return (REG_P (*x)
6887 && REGNO (*x) != LO_REGNO
6888 && REGNO (*x) != HI_REGNO
6889 && reg_set_p (*x, (const_rtx) prev));
6892 /* Return true if, apart from HI/LO, there are no true dependencies
6893 between multiplication instructions PREV and INSN. */
6895 bool
6896 mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6898 rtx pat;
6900 pat = PATTERN (insn);
6901 if (GET_CODE (pat) == PARALLEL)
6902 pat = XVECEXP (pat, 0, 0);
6903 return (GET_CODE (pat) == SET
6904 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6907 /* Return true if INSN is an ldc instruction that issues to the
6908 MeP-h1 integer pipeline. This is true for instructions that
6909 read from PSW, LP, SAR, HI and LO. */
/* Return true if INSN reads from one of the registers (PSW, LP, SAR,
   HI, LO) whose ldc issues to the MeP-h1 integer pipeline.  */
6911 bool
6912 mep_ipipe_ldc_p (rtx insn)
6914 rtx pat, src;
6916 pat = PATTERN (insn);
6918 /* Cope with intrinsics that set both a hard register and its shadow.
6919 The set of the hard register comes first. */
6920 if (GET_CODE (pat) == PARALLEL)
6921 pat = XVECEXP (pat, 0, 0);
6923 if (GET_CODE (pat) == SET)
6925 src = SET_SRC (pat);
6927 /* Cope with intrinsics. The first operand to the unspec is
6928 the source register. */
6929 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6930 src = XVECEXP (src, 0, 0);
6932 if (REG_P (src))
6933 switch (REGNO (src))
/* All five cases fall through to the shared "return true".  */
6935 case PSW_REGNO:
6936 case LP_REGNO:
6937 case SAR_REGNO:
6938 case HI_REGNO:
6939 case LO_REGNO:
6940 return true;
6943 return false;
6946 /* Create a VLIW bundle from core instruction CORE and coprocessor
6947 instruction COP. COP always satisfies INSN_P, but CORE can be
6948 either a new pattern or an existing instruction.
6950 Emit the bundle in place of COP and return it. */
/* Create a VLIW bundle (a SEQUENCE insn) from core instruction CORE
   and coprocessor instruction COP.  COP always satisfies INSN_P, but
   CORE can be either a new pattern or an existing instruction.
   Emit the bundle in place of COP and return it.  */
6952 static rtx
6953 mep_make_bundle (rtx core, rtx cop)
6955 rtx insn;
6957 /* If CORE is an existing instruction, remove it, otherwise put
6958 the new pattern in an INSN harness. */
6959 if (INSN_P (core))
6960 remove_insn (core);
6961 else
6962 core = make_insn_raw (core);
6964 /* Generate the bundle sequence and replace COP with it. */
6965 insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
6966 insn = emit_insn_after (insn, cop);
6967 remove_insn (cop);
6969 /* Set up the links of the insns inside the SEQUENCE. */
/* CORE and COP are chained between the SEQUENCE's neighbours so a
   linear walk still visits them in order.  */
6970 PREV_INSN (core) = PREV_INSN (insn);
6971 NEXT_INSN (core) = cop;
6972 PREV_INSN (cop) = core;
6973 NEXT_INSN (cop) = NEXT_INSN (insn);
6975 /* Set the VLIW flag for the coprocessor instruction. */
6976 PUT_MODE (core, VOIDmode);
6977 PUT_MODE (cop, BImode);
6979 /* Derive a location for the bundle. Individual instructions cannot
6980 have their own location because there can be no assembler labels
6981 between CORE and COP. */
6982 INSN_LOCATOR (insn) = INSN_LOCATOR (INSN_LOCATOR (core) ? core : cop);
6983 INSN_LOCATOR (core) = 0;
6984 INSN_LOCATOR (cop) = 0;
6986 return insn;
6989 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6991 static void
6992 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6994 rtx * pinsn = (rtx *) data;
6996 if (*pinsn && reg_mentioned_p (x, *pinsn))
6997 *pinsn = NULL_RTX;
7000 /* Return true if anything in insn X is (anti,output,true) dependent on
7001 anything in insn Y. */
7003 static int
7004 mep_insn_dependent_p (rtx x, rtx y)
7006 rtx tmp;
7008 gcc_assert (INSN_P (x));
7009 gcc_assert (INSN_P (y));
7011 tmp = PATTERN (y);
7012 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
7013 if (tmp == NULL_RTX)
7014 return 1;
7016 tmp = PATTERN (x);
7017 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
7018 if (tmp == NULL_RTX)
7019 return 1;
7021 return 0;
7024 static int
7025 core_insn_p (rtx insn)
7027 if (GET_CODE (PATTERN (insn)) == USE)
7028 return 0;
7029 if (get_attr_slot (insn) == SLOT_CORE)
7030 return 1;
7031 return 0;
7034 /* Mark coprocessor instructions that can be bundled together with
7035 the immediately preceding core instruction. This is later used
7036 to emit the "+" that tells the assembler to create a VLIW insn.
7038 For unbundled insns, the assembler will automatically add coprocessor
7039 nops, and 16-bit core nops. Due to an apparent oversight in the
7040 spec, the assembler will _not_ automatically add 32-bit core nops,
7041 so we have to emit those here.
7043 Called from mep_insn_reorg. */
7045 static void
7046 mep_bundle_insns (rtx insns)
7048 rtx insn, last = NULL_RTX, first = NULL_RTX;
7049 int saw_scheduling = 0;
7051 /* Only do bundling if we're in vliw mode. */
7052 if (!mep_vliw_function_p (cfun->decl))
7053 return;
7055 /* The first insn in a bundle are TImode, the remainder are
7056 VOIDmode. After this function, the first has VOIDmode and the
7057 rest have BImode. */
7059 /* Note: this doesn't appear to be true for JUMP_INSNs. */
7061 /* First, move any NOTEs that are within a bundle, to the beginning
7062 of the bundle. */
7063 for (insn = insns; insn ; insn = NEXT_INSN (insn))
7065 if (NOTE_P (insn) && first)
7066 /* Don't clear FIRST. */;
7068 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
7069 first = insn;
7071 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
7073 rtx note, prev;
7075 /* INSN is part of a bundle; FIRST is the first insn in that
7076 bundle. Move all intervening notes out of the bundle.
7077 In addition, since the debug pass may insert a label
7078 whenever the current line changes, set the location info
7079 for INSN to match FIRST. */
7081 INSN_LOCATOR (insn) = INSN_LOCATOR (first);
7083 note = PREV_INSN (insn);
7084 while (note && note != first)
7086 prev = PREV_INSN (note);
7088 if (NOTE_P (note))
7090 /* Remove NOTE from here... */
7091 PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
7092 NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
7093 /* ...and put it in here. */
7094 NEXT_INSN (note) = first;
7095 PREV_INSN (note) = PREV_INSN (first);
7096 NEXT_INSN (PREV_INSN (note)) = note;
7097 PREV_INSN (NEXT_INSN (note)) = note;
7100 note = prev;
7104 else if (!NONJUMP_INSN_P (insn))
7105 first = 0;
7108 /* Now fix up the bundles. */
7109 for (insn = insns; insn ; insn = NEXT_INSN (insn))
7111 if (NOTE_P (insn))
7112 continue;
7114 if (!NONJUMP_INSN_P (insn))
7116 last = 0;
7117 continue;
7120 /* If we're not optimizing enough, there won't be scheduling
7121 info. We detect that here. */
7122 if (GET_MODE (insn) == TImode)
7123 saw_scheduling = 1;
7124 if (!saw_scheduling)
7125 continue;
7127 if (TARGET_IVC2)
7129 rtx core_insn = NULL_RTX;
7131 /* IVC2 slots are scheduled by DFA, so we just accept
7132 whatever the scheduler gives us. However, we must make
7133 sure the core insn (if any) is the first in the bundle.
7134 The IVC2 assembler can insert whatever NOPs are needed,
7135 and allows a COP insn to be first. */
7137 if (NONJUMP_INSN_P (insn)
7138 && GET_CODE (PATTERN (insn)) != USE
7139 && GET_MODE (insn) == TImode)
7141 for (last = insn;
7142 NEXT_INSN (last)
7143 && GET_MODE (NEXT_INSN (last)) == VOIDmode
7144 && NONJUMP_INSN_P (NEXT_INSN (last));
7145 last = NEXT_INSN (last))
7147 if (core_insn_p (last))
7148 core_insn = last;
7150 if (core_insn_p (last))
7151 core_insn = last;
7153 if (core_insn && core_insn != insn)
7155 /* Swap core insn to first in the bundle. */
7157 /* Remove core insn. */
7158 if (PREV_INSN (core_insn))
7159 NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
7160 if (NEXT_INSN (core_insn))
7161 PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);
7163 /* Re-insert core insn. */
7164 PREV_INSN (core_insn) = PREV_INSN (insn);
7165 NEXT_INSN (core_insn) = insn;
7167 if (PREV_INSN (core_insn))
7168 NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
7169 PREV_INSN (insn) = core_insn;
7171 PUT_MODE (core_insn, TImode);
7172 PUT_MODE (insn, VOIDmode);
7176 /* The first insn has TImode, the rest have VOIDmode */
7177 if (GET_MODE (insn) == TImode)
7178 PUT_MODE (insn, VOIDmode);
7179 else
7180 PUT_MODE (insn, BImode);
7181 continue;
7184 PUT_MODE (insn, VOIDmode);
7185 if (recog_memoized (insn) >= 0
7186 && get_attr_slot (insn) == SLOT_COP)
7188 if (GET_CODE (insn) == JUMP_INSN
7189 || ! last
7190 || recog_memoized (last) < 0
7191 || get_attr_slot (last) != SLOT_CORE
7192 || (get_attr_length (insn)
7193 != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
7194 || mep_insn_dependent_p (insn, last))
7196 switch (get_attr_length (insn))
7198 case 8:
7199 break;
7200 case 6:
7201 insn = mep_make_bundle (gen_nop (), insn);
7202 break;
7203 case 4:
7204 if (TARGET_OPT_VL64)
7205 insn = mep_make_bundle (gen_nop32 (), insn);
7206 break;
7207 case 2:
7208 if (TARGET_OPT_VL64)
7209 error ("2 byte cop instructions are"
7210 " not allowed in 64-bit VLIW mode");
7211 else
7212 insn = mep_make_bundle (gen_nop (), insn);
7213 break;
7214 default:
7215 error ("unexpected %d byte cop instruction",
7216 get_attr_length (insn));
7217 break;
7220 else
7221 insn = mep_make_bundle (last, insn);
7224 last = insn;
7229 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7230 Return true on success. This function can fail if the intrinsic
7231 is unavailable or if the operands don't satisfy their predicates. */
7233 bool
7234 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7236 const struct cgen_insn *cgen_insn;
7237 const struct insn_data *idata;
7238 rtx newop[10];
7239 int i;
7241 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7242 return false;
7244 idata = &insn_data[cgen_insn->icode];
7245 for (i = 0; i < idata->n_operands; i++)
7247 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7248 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7249 return false;
7252 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7253 newop[3], newop[4], newop[5],
7254 newop[6], newop[7], newop[8]));
7256 return true;
7260 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7261 OPERANDS[0]. Report an error if the instruction could not
7262 be synthesized. OPERANDS[1] is a register_operand. For sign
7263 and zero extensions, it may be smaller than SImode. */
7265 bool
7266 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
7267 rtx * operands ATTRIBUTE_UNUSED)
7269 return false;
7273 /* Likewise, but apply a binary operation to OPERANDS[1] and
7274 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7275 can be a general_operand.
7277 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7278 third operand. REG and REG3 take register operands only. */
7280 bool
7281 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
7282 int ATTRIBUTE_UNUSED immediate3,
7283 int ATTRIBUTE_UNUSED reg,
7284 int ATTRIBUTE_UNUSED reg3,
7285 rtx * operands ATTRIBUTE_UNUSED)
7287 return false;
7290 static bool
7291 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total, bool ATTRIBUTE_UNUSED speed_t)
7293 switch (code)
7295 case CONST_INT:
7296 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7297 *total = 0;
7298 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7299 *total = 1;
7300 else
7301 *total = 3;
7302 return true;
7304 case SYMBOL_REF:
7305 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7306 return true;
7308 case MULT:
7309 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7310 ? COSTS_N_INSNS (3)
7311 : COSTS_N_INSNS (2));
7312 return true;
7314 return false;
7317 static int
7318 mep_address_cost (rtx addr ATTRIBUTE_UNUSED, bool ATTRIBUTE_UNUSED speed_p)
7320 return 1;
7323 static bool
7324 mep_handle_option (size_t code,
7325 const char *arg ATTRIBUTE_UNUSED,
7326 int value ATTRIBUTE_UNUSED)
7328 int i;
7330 switch (code)
7332 case OPT_mall_opts:
7333 target_flags |= MEP_ALL_OPTS;
7334 break;
7336 case OPT_mno_opts:
7337 target_flags &= ~ MEP_ALL_OPTS;
7338 break;
7340 case OPT_mcop64:
7341 target_flags |= MASK_COP;
7342 target_flags |= MASK_64BIT_CR_REGS;
7343 break;
7345 case OPT_mtiny_:
7346 option_mtiny_specified = 1;
7348 case OPT_mivc2:
7349 target_flags |= MASK_COP;
7350 target_flags |= MASK_64BIT_CR_REGS;
7351 target_flags |= MASK_VLIW;
7352 target_flags |= MASK_OPT_VL64;
7353 target_flags |= MASK_IVC2;
7355 for (i=0; i<32; i++)
7356 fixed_regs[i+48] = 0;
7357 for (i=0; i<32; i++)
7358 call_used_regs[i+48] = 1;
7359 for (i=6; i<8; i++)
7360 call_used_regs[i+48] = 0;
7362 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
7363 RN (0, "$csar0");
7364 RN (1, "$cc");
7365 RN (4, "$cofr0");
7366 RN (5, "$cofr1");
7367 RN (6, "$cofa0");
7368 RN (7, "$cofa1");
7369 RN (15, "$csar1");
7371 RN (16, "$acc0_0");
7372 RN (17, "$acc0_1");
7373 RN (18, "$acc0_2");
7374 RN (19, "$acc0_3");
7375 RN (20, "$acc0_4");
7376 RN (21, "$acc0_5");
7377 RN (22, "$acc0_6");
7378 RN (23, "$acc0_7");
7380 RN (24, "$acc1_0");
7381 RN (25, "$acc1_1");
7382 RN (26, "$acc1_2");
7383 RN (27, "$acc1_3");
7384 RN (28, "$acc1_4");
7385 RN (29, "$acc1_5");
7386 RN (30, "$acc1_6");
7387 RN (31, "$acc1_7");
7388 #undef RN
7390 break;
7392 default:
7393 break;
7395 return TRUE;
7398 static void
7399 mep_asm_init_sections (void)
7401 based_section
7402 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7403 "\t.section .based,\"aw\"");
7405 tinybss_section
7406 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7407 "\t.section .sbss,\"aw\"");
7409 sdata_section
7410 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7411 "\t.section .sdata,\"aw\",@progbits");
7413 far_section
7414 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7415 "\t.section .far,\"aw\"");
7417 farbss_section
7418 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7419 "\t.section .farbss,\"aw\"");
7421 frodata_section
7422 = get_unnamed_section (0, output_section_asm_op,
7423 "\t.section .frodata,\"a\"");
7425 srodata_section
7426 = get_unnamed_section (0, output_section_asm_op,
7427 "\t.section .srodata,\"a\"");
7429 vtext_section
7430 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7431 "\t.section .vtext,\"axv\"\n\t.vliw");
7433 vftext_section
7434 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7435 "\t.section .vftext,\"axv\"\n\t.vliw");
7437 ftext_section
7438 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7439 "\t.section .ftext,\"ax\"\n\t.core");
7443 #include "gt-mep.h"