Merge from mainline (167278:168000).
[official-gcc/graphite-test-results.git] / gcc / config / mn10300 / mn10300.c
bloba4e0bc432df80478b0ff8e173d609055c250f22a
1 /* Subroutines for insn-output.c for Matsushita MN10300 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "output.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "reload.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "function.h"
40 #include "obstack.h"
41 #include "diagnostic-core.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "target-def.h"
45 #include "df.h"
47 /* This is used by GOTaddr2picreg to uniquely identify
48 UNSPEC_INT_LABELs. */
49 int mn10300_unspec_int_label_counter;
51 /* This is used in the am33_2.0-linux-gnu port, in which global symbol
52 names are not prefixed by underscores, to tell whether to prefix a
53 label with a plus sign or not, so that the assembler can tell
54 symbol names from register names. */
55 int mn10300_protect_label;
57 /* The selected processor. */
58 enum processor_type mn10300_processor = PROCESSOR_DEFAULT;
60 /* Processor type to select for tuning.  Raw -mtune= string; decoded
   in mn10300_option_override. */
61 static const char * mn10300_tune_string = NULL;
63 /* Selected processor type for tuning. */
64 enum processor_type mn10300_tune_cpu = PROCESSOR_DEFAULT;
66 /* The size of the callee register save area.  Right now we save everything
67 on entry since it costs us nothing in code size.  It does cost us from a
68 speed standpoint, so we want to optimize this sooner or later. */
/* Registers 2,3 (data) and 6,7 (address) are saved one word each; the
   extended registers 14-17 are saved as an all-or-nothing 16-byte group.  */
69 #define REG_SAVE_BYTES (4 * df_regs_ever_live_p (2) \
70 + 4 * df_regs_ever_live_p (3) \
71 + 4 * df_regs_ever_live_p (6) \
72 + 4 * df_regs_ever_live_p (7) \
73 + 16 * (df_regs_ever_live_p (14) \
74 || df_regs_ever_live_p (15) \
75 || df_regs_ever_live_p (16) \
76 || df_regs_ever_live_p (17)))
78 static int mn10300_address_cost (rtx, bool);
80 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE.  Enable
   -fomit-frame-pointer at -O1 and above. */
81 static const struct default_options mn10300_option_optimization_table[] =
83 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
84 { OPT_LEVELS_NONE, 0, NULL, 0 }
87 /* Implement TARGET_HANDLE_OPTION. */
89 static bool
90 mn10300_handle_option (size_t code,
91 const char *arg ATTRIBUTE_UNUSED,
92 int value)
94 switch (code)
96 case OPT_mam33:
97 mn10300_processor = value ? PROCESSOR_AM33 : PROCESSOR_MN10300;
98 return true;
100 case OPT_mam33_2:
101 mn10300_processor = (value
102 ? PROCESSOR_AM33_2
103 : MIN (PROCESSOR_AM33, PROCESSOR_DEFAULT));
104 return true;
106 case OPT_mam34:
107 mn10300_processor = (value ? PROCESSOR_AM34 : PROCESSOR_DEFAULT);
108 return true;
110 case OPT_mtune_:
111 mn10300_tune_string = arg;
112 return true;
114 default:
115 return true;
119 /* Implement TARGET_OPTION_OVERRIDE. */
121 static void
122 mn10300_option_override (void)
124 if (TARGET_AM33)
125 target_flags &= ~MASK_MULT_BUG;
126 else
128 /* Disable scheduling for the MN10300 as we do
129 not have timing information available for it. */
130 flag_schedule_insns = 0;
131 flag_schedule_insns_after_reload = 0;
134 if (mn10300_tune_string)
136 if (strcasecmp (mn10300_tune_string, "mn10300") == 0)
137 mn10300_tune_cpu = PROCESSOR_MN10300;
138 else if (strcasecmp (mn10300_tune_string, "am33") == 0)
139 mn10300_tune_cpu = PROCESSOR_AM33;
140 else if (strcasecmp (mn10300_tune_string, "am33-2") == 0)
141 mn10300_tune_cpu = PROCESSOR_AM33_2;
142 else if (strcasecmp (mn10300_tune_string, "am34") == 0)
143 mn10300_tune_cpu = PROCESSOR_AM34;
144 else
145 error ("-mtune= expects mn10300, am33, am33-2, or am34");
149 static void
150 mn10300_file_start (void)
152 default_file_start ();
154 if (TARGET_AM33_2)
155 fprintf (asm_out_file, "\t.am33_2\n");
156 else if (TARGET_AM33)
157 fprintf (asm_out_file, "\t.am33\n");
160 /* Print operand X using operand code CODE to assembly language output file
161 FILE. */
/* Codes: 'b'/'B' condition (normal/reversed); 'C' call operand;
   'D' FP register or memory; 'L'/'H' low/high word of a 64-bit value;
   'A' address always printed with parentheses and an explicit offset;
   'N' inverted low byte; 'U' low byte; 'S' shift count masked to 5 bits.  */
163 void
164 mn10300_print_operand (FILE *file, rtx x, int code)
166 switch (code)
168 case 'b':
169 case 'B':
/* Floating-point comparisons use a different (maybe-unordered) set of
   condition mnemonics than integer ones.  */
170 if (GET_MODE (XEXP (x, 0)) == CC_FLOATmode)
172 switch (code == 'b' ? GET_CODE (x)
173 : reverse_condition_maybe_unordered (GET_CODE (x)))
175 case NE:
176 fprintf (file, "ne");
177 break;
178 case EQ:
179 fprintf (file, "eq");
180 break;
181 case GE:
182 fprintf (file, "ge");
183 break;
184 case GT:
185 fprintf (file, "gt");
186 break;
187 case LE:
188 fprintf (file, "le");
189 break;
190 case LT:
191 fprintf (file, "lt");
192 break;
193 case ORDERED:
194 fprintf (file, "lge");
195 break;
196 case UNORDERED:
197 fprintf (file, "uo");
198 break;
199 case LTGT:
200 fprintf (file, "lg");
201 break;
202 case UNEQ:
203 fprintf (file, "ue");
204 break;
205 case UNGE:
206 fprintf (file, "uge");
207 break;
208 case UNGT:
209 fprintf (file, "ug");
210 break;
211 case UNLE:
212 fprintf (file, "ule");
213 break;
214 case UNLT:
215 fprintf (file, "ul");
216 break;
217 default:
218 gcc_unreachable ();
220 break;
222 /* These are normal and reversed branches. */
223 switch (code == 'b' ? GET_CODE (x) : reverse_condition (GET_CODE (x)))
225 case NE:
226 fprintf (file, "ne");
227 break;
228 case EQ:
229 fprintf (file, "eq");
230 break;
231 case GE:
232 fprintf (file, "ge");
233 break;
234 case GT:
235 fprintf (file, "gt");
236 break;
237 case LE:
238 fprintf (file, "le");
239 break;
240 case LT:
241 fprintf (file, "lt");
242 break;
243 case GEU:
244 fprintf (file, "cc");
245 break;
246 case GTU:
247 fprintf (file, "hi");
248 break;
249 case LEU:
250 fprintf (file, "ls");
251 break;
252 case LTU:
253 fprintf (file, "cs");
254 break;
255 default:
256 gcc_unreachable ();
258 break;
259 case 'C':
260 /* This is used for the operand to a call instruction;
261 if it's a REG, enclose it in parens, else output
262 the operand normally. */
263 if (REG_P (x))
265 fputc ('(', file);
266 mn10300_print_operand (file, x, 0);
267 fputc (')', file);
269 else
270 mn10300_print_operand (file, x, 0);
271 break;
273 case 'D':
274 switch (GET_CODE (x))
276 case MEM:
277 fputc ('(', file);
278 output_address (XEXP (x, 0));
279 fputc (')', file);
280 break;
282 case REG:
/* FP registers are numbered from FIRST_FP_REGNUM (18) internally but
   named fd0, fd1, ... in assembly.  */
283 fprintf (file, "fd%d", REGNO (x) - 18);
284 break;
286 default:
287 gcc_unreachable ();
289 break;
291 /* These are the least significant word in a 64bit value. */
292 case 'L':
293 switch (GET_CODE (x))
295 case MEM:
296 fputc ('(', file);
297 output_address (XEXP (x, 0));
298 fputc (')', file);
299 break;
301 case REG:
302 fprintf (file, "%s", reg_names[REGNO (x)]);
303 break;
305 case SUBREG:
306 fprintf (file, "%s", reg_names[subreg_regno (x)]);
307 break;
309 case CONST_DOUBLE:
311 long val[2];
312 REAL_VALUE_TYPE rv;
314 switch (GET_MODE (x))
316 case DFmode:
317 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
318 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
319 fprintf (file, "0x%lx", val[0]);
/* NOTE(review): stray second semicolon after this break (harmless
   empty statement); likewise below.  */
320 break;;
321 case SFmode:
322 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
323 REAL_VALUE_TO_TARGET_SINGLE (rv, val[0]);
324 fprintf (file, "0x%lx", val[0]);
325 break;;
326 case VOIDmode:
327 case DImode:
328 mn10300_print_operand_address (file,
329 GEN_INT (CONST_DOUBLE_LOW (x)));
330 break;
331 default:
332 break;
334 break;
337 case CONST_INT:
339 rtx low, high;
340 split_double (x, &low, &high);
341 fprintf (file, "%ld", (long)INTVAL (low));
342 break;
345 default:
346 gcc_unreachable ();
348 break;
350 /* Similarly, but for the most significant word. */
351 case 'H':
352 switch (GET_CODE (x))
354 case MEM:
355 fputc ('(', file);
/* Advance the memory reference by one word to reach the high part.  */
356 x = adjust_address (x, SImode, 4);
357 output_address (XEXP (x, 0));
358 fputc (')', file);
359 break;
361 case REG:
362 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
363 break;
365 case SUBREG:
366 fprintf (file, "%s", reg_names[subreg_regno (x) + 1]);
367 break;
369 case CONST_DOUBLE:
371 long val[2];
372 REAL_VALUE_TYPE rv;
374 switch (GET_MODE (x))
376 case DFmode:
377 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
378 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
379 fprintf (file, "0x%lx", val[1]);
380 break;;
381 case SFmode:
/* A single-precision value has no high word.  */
382 gcc_unreachable ();
383 case VOIDmode:
384 case DImode:
385 mn10300_print_operand_address (file,
386 GEN_INT (CONST_DOUBLE_HIGH (x)));
387 break;
388 default:
389 break;
391 break;
394 case CONST_INT:
396 rtx low, high;
397 split_double (x, &low, &high);
398 fprintf (file, "%ld", (long)INTVAL (high));
399 break;
402 default:
403 gcc_unreachable ();
405 break;
407 case 'A':
/* Print a memory address wrapped in parens; a bare base register gets
   an explicit zero displacement so the assembler sees "(0,reg)".  */
408 fputc ('(', file);
409 if (REG_P (XEXP (x, 0)))
410 output_address (gen_rtx_PLUS (SImode, XEXP (x, 0), const0_rtx));
411 else
412 output_address (XEXP (x, 0));
413 fputc (')', file);
414 break;
416 case 'N':
417 gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
418 fprintf (file, "%d", (int)((~INTVAL (x)) & 0xff));
419 break;
421 case 'U':
422 gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
423 fprintf (file, "%d", (int)(INTVAL (x) & 0xff));
424 break;
426 /* For shift counts. The hardware ignores the upper bits of
427 any immediate, but the assembler will flag an out of range
428 shift count as an error. So we mask off the high bits
429 of the immediate here. */
430 case 'S':
431 if (CONST_INT_P (x))
433 fprintf (file, "%d", (int)(INTVAL (x) & 0x1f));
434 break;
436 /* FALL THROUGH */
438 default:
439 switch (GET_CODE (x))
441 case MEM:
442 fputc ('(', file);
443 output_address (XEXP (x, 0));
444 fputc (')', file);
445 break;
447 case PLUS:
448 output_address (x);
449 break;
451 case REG:
452 fprintf (file, "%s", reg_names[REGNO (x)]);
453 break;
455 case SUBREG:
456 fprintf (file, "%s", reg_names[subreg_regno (x)]);
457 break;
459 /* This will only be single precision.... */
460 case CONST_DOUBLE:
462 unsigned long val;
463 REAL_VALUE_TYPE rv;
465 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
466 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
467 fprintf (file, "0x%lx", val);
468 break;
471 case CONST_INT:
472 case SYMBOL_REF:
473 case CONST:
474 case LABEL_REF:
475 case CODE_LABEL:
476 case UNSPEC:
477 mn10300_print_operand_address (file, x);
478 break;
479 default:
480 gcc_unreachable ();
482 break;
486 /* Output assembly language output for the address ADDR to FILE. */
488 void
489 mn10300_print_operand_address (FILE *file, rtx addr)
491 switch (GET_CODE (addr))
493 case POST_INC:
494 mn10300_print_operand_address (file, XEXP (addr, 0));
495 fputc ('+', file);
496 break;
497 case REG:
498 mn10300_print_operand (file, addr, 0);
499 break;
500 case PLUS:
502 rtx base, index;
503 if (REG_P (XEXP (addr, 0))
504 && REG_OK_FOR_BASE_P (XEXP (addr, 0)))
505 base = XEXP (addr, 0), index = XEXP (addr, 1);
506 else if (REG_P (XEXP (addr, 1))
507 && REG_OK_FOR_BASE_P (XEXP (addr, 1)))
508 base = XEXP (addr, 1), index = XEXP (addr, 0);
509 else
510 gcc_unreachable ();
511 mn10300_print_operand (file, index, 0);
512 fputc (',', file);
513 mn10300_print_operand (file, base, 0);;
514 break;
516 case SYMBOL_REF:
517 output_addr_const (file, addr);
518 break;
519 default:
520 output_addr_const (file, addr);
521 break;
525 /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA.
527 Used for PIC-specific UNSPECs. */
529 static bool
530 mn10300_asm_output_addr_const_extra (FILE *file, rtx x)
532 if (GET_CODE (x) == UNSPEC)
534 switch (XINT (x, 1))
536 case UNSPEC_INT_LABEL:
537 asm_fprintf (file, ".%LLIL" HOST_WIDE_INT_PRINT_DEC,
538 INTVAL (XVECEXP (x, 0, 0)));
539 break;
540 case UNSPEC_PIC:
541 /* GLOBAL_OFFSET_TABLE or local symbols, no suffix. */
542 output_addr_const (file, XVECEXP (x, 0, 0));
543 break;
544 case UNSPEC_GOT:
545 output_addr_const (file, XVECEXP (x, 0, 0));
546 fputs ("@GOT", file);
547 break;
548 case UNSPEC_GOTOFF:
549 output_addr_const (file, XVECEXP (x, 0, 0));
550 fputs ("@GOTOFF", file);
551 break;
552 case UNSPEC_PLT:
553 output_addr_const (file, XVECEXP (x, 0, 0));
554 fputs ("@PLT", file);
555 break;
556 case UNSPEC_GOTSYM_OFF:
557 assemble_name (file, GOT_SYMBOL_NAME);
558 fputs ("-(", file);
559 output_addr_const (file, XVECEXP (x, 0, 0));
560 fputs ("-.)", file);
561 break;
562 default:
563 return false;
565 return true;
567 else
568 return false;
571 /* Count the number of FP registers that have to be saved. */
572 static int
573 fp_regs_to_save (void)
575 int i, n = 0;
577 if (! TARGET_AM33_2)
578 return 0;
580 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
581 if (df_regs_ever_live_p (i) && ! call_really_used_regs[i])
582 ++n;
584 return n;
587 /* Print a set of registers in the format required by "movm" and "ret".
588 Register K is saved if bit K of MASK is set. The data and address
589 registers can be stored individually, but the extended registers cannot.
590 We assume that the mask already takes that into account. For instance,
591 bits 14 to 17 must have the same value. */
593 void
594 mn10300_print_reg_list (FILE *file, int mask)
596 int need_comma;
597 int i;
599 need_comma = 0;
600 fputc ('[', file);
602 for (i = 0; i < FIRST_EXTENDED_REGNUM; i++)
603 if ((mask & (1 << i)) != 0)
605 if (need_comma)
606 fputc (',', file);
607 fputs (reg_names [i], file);
608 need_comma = 1;
611 if ((mask & 0x3c000) != 0)
613 gcc_assert ((mask & 0x3c000) == 0x3c000);
614 if (need_comma)
615 fputc (',', file);
616 fputs ("exreg1", file);
617 need_comma = 1;
620 fputc (']', file);
/* Return nonzero if a plain "ret"/"retf" can be used for this function:
   after reload, with no frame, no saved callee registers (integer or
   FP) and no frame pointer.  (The return-type line of this definition
   precedes this excerpt and is not shown here.)  */
624 mn10300_can_use_return_insn (void)
626 /* size includes the fixed stack space needed for function calls. */
627 int size = get_frame_size () + crtl->outgoing_args_size;
629 /* And space for the return pointer. */
630 size += crtl->outgoing_args_size ? 4 : 0;
632 return (reload_completed
633 && size == 0
634 && !df_regs_ever_live_p (2)
635 && !df_regs_ever_live_p (3)
636 && !df_regs_ever_live_p (6)
637 && !df_regs_ever_live_p (7)
638 && !df_regs_ever_live_p (14)
639 && !df_regs_ever_live_p (15)
640 && !df_regs_ever_live_p (16)
641 && !df_regs_ever_live_p (17)
642 && fp_regs_to_save () == 0
643 && !frame_pointer_needed);
646 /* Returns the set of live, callee-saved registers as a bitmask. The
647 callee-saved extended registers cannot be stored individually, so
648 all of them will be included in the mask if any one of them is used. */
/* (The return-type line of this definition precedes this excerpt and
   is not shown here.)  */
651 mn10300_get_live_callee_saved_regs (void)
653 int mask;
654 int i;
656 mask = 0;
657 for (i = 0; i <= LAST_EXTENDED_REGNUM; i++)
658 if (df_regs_ever_live_p (i) && ! call_really_used_regs[i])
659 mask |= (1 << i);
/* If any extended register (bits 14-17) is live, include the whole
   group, since movm saves them all or none.  */
660 if ((mask & 0x3c000) != 0)
661 mask |= 0x3c000;
663 return mask;
666 static rtx
667 F (rtx r)
669 RTX_FRAME_RELATED_P (r) = 1;
670 return r;
673 /* Generate an instruction that pushes several registers onto the stack.
674 Register K will be saved if bit K in MASK is set. The function does
675 nothing if MASK is zero.
677 To be compatible with the "movm" instruction, the lowest-numbered
678 register must be stored in the lowest slot. If MASK is the set
679 { R1,...,RN }, where R1...RN are ordered least first, the generated
680 instruction will have the form:
682 (parallel
683 (set (reg:SI 9) (plus:SI (reg:SI 9) (const_int -N*4)))
684 (set (mem:SI (plus:SI (reg:SI 9)
685 (const_int -1*4)))
686 (reg:SI RN))
688 (set (mem:SI (plus:SI (reg:SI 9)
689 (const_int -N*4)))
690 (reg:SI R1))) */
692 void
693 mn10300_gen_multiple_store (int mask)
695 if (mask != 0)
697 int i;
698 int count;
699 rtx par;
700 int pari;
702 /* Count how many registers need to be saved. */
703 count = 0;
704 for (i = 0; i <= LAST_EXTENDED_REGNUM; i++)
705 if ((mask & (1 << i)) != 0)
706 count += 1;
708 /* We need one PARALLEL element to update the stack pointer and
709 an additional element for each register that is stored. */
710 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count + 1));
712 /* Create the instruction that updates the stack pointer. */
713 XVECEXP (par, 0, 0)
714 = F (gen_rtx_SET (SImode,
715 stack_pointer_rtx,
716 gen_rtx_PLUS (SImode,
717 stack_pointer_rtx,
718 GEN_INT (-count * 4))));
720 /* Create each store. */
721 pari = 1;
722 for (i = LAST_EXTENDED_REGNUM; i >= 0; i--)
723 if ((mask & (1 << i)) != 0)
725 rtx address = gen_rtx_PLUS (SImode,
726 stack_pointer_rtx,
727 GEN_INT (-pari * 4));
728 XVECEXP(par, 0, pari)
729 = F (gen_rtx_SET (VOIDmode,
730 gen_rtx_MEM (SImode, address),
731 gen_rtx_REG (SImode, i)));
732 pari += 1;
735 F (emit_insn (par));
/* Emit the prologue: save callee-saved integer registers with a single
   movm-style multiple store, then (on AM33/2.0) save live call-saved FP
   registers using whichever addressing strategy yields the smallest
   code, allocate the local frame, set up the frame pointer and the PIC
   register as needed.  */
739 void
740 mn10300_expand_prologue (void)
742 HOST_WIDE_INT size;
744 /* SIZE includes the fixed stack space needed for function calls. */
745 size = get_frame_size () + crtl->outgoing_args_size;
746 size += (crtl->outgoing_args_size ? 4 : 0);
748 /* If we use any of the callee-saved registers, save them now. */
749 mn10300_gen_multiple_store (mn10300_get_live_callee_saved_regs ());
751 if (TARGET_AM33_2 && fp_regs_to_save ())
753 int num_regs_to_save = fp_regs_to_save (), i;
754 HOST_WIDE_INT xsize;
755 enum
757 save_sp_merge,
758 save_sp_no_merge,
759 save_sp_partial_merge,
760 save_a0_merge,
761 save_a0_no_merge
762 } strategy;
/* strategy_size starts at UINT_MAX; save_sp_no_merge is evaluated
   unconditionally below, so STRATEGY is always assigned.  */
763 unsigned int strategy_size = (unsigned)-1, this_strategy_size;
764 rtx reg;
766 /* We have several different strategies to save FP registers.
767 We can store them using SP offsets, which is beneficial if
768 there are just a few registers to save, or we can use `a0' in
769 post-increment mode (`a0' is the only call-clobbered address
770 register that is never used to pass information to a
771 function).  Furthermore, if we don't need a frame pointer, we
772 can merge the two SP adds into a single one, but this isn't
773 always beneficial; sometimes we can just split the two adds
774 so that we don't exceed a 16-bit constant size.  The code
775 below will select which strategy to use, so as to generate
776 smallest code.  Ties are broken in favor of shorter sequences
777 (in terms of number of instructions). */
/* Encoded sizes (bytes) of an "add #imm,An" / "add #imm,sp" insn for a
   given immediate S.  */
779 #define SIZE_ADD_AX(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
780 : (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 2)
781 #define SIZE_ADD_SP(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
782 : (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 3)
784 /* We add 0 * (S) in two places to promote to the type of S,
785 so that all arms of the conditional have the same type. */
786 #define SIZE_FMOV_LIMIT(S,N,L,SIZE1,SIZE2,ELSE) \
787 (((S) >= (L)) ? 0 * (S) + (SIZE1) * (N) \
788 : ((S) + 4 * (N) >= (L)) ? (((L) - (S)) / 4 * (SIZE2) \
789 + ((S) + 4 * (N) - (L)) / 4 * (SIZE1)) \
790 : 0 * (S) + (ELSE))
/* Total encoded size of N "fmov fs#,(#off,sp)" insns starting at
   offset S, accounting for the displacement-size break points.  */
791 #define SIZE_FMOV_SP_(S,N) \
792 (SIZE_FMOV_LIMIT ((S), (N), (1 << 24), 7, 6, \
793 SIZE_FMOV_LIMIT ((S), (N), (1 << 8), 6, 4, \
794 (S) ? 4 * (N) : 3 + 4 * ((N) - 1))))
795 #define SIZE_FMOV_SP(S,N) (SIZE_FMOV_SP_ ((unsigned HOST_WIDE_INT)(S), (N)))
797 /* Consider alternative save_sp_merge only if we don't need the
798 frame pointer and size is nonzero. */
799 if (! frame_pointer_needed && size)
801 /* Insn: add -(size + 4 * num_regs_to_save), sp. */
802 this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
803 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
804 this_strategy_size += SIZE_FMOV_SP (size, num_regs_to_save);
806 if (this_strategy_size < strategy_size)
808 strategy = save_sp_merge;
809 strategy_size = this_strategy_size;
813 /* Consider alternative save_sp_no_merge unconditionally. */
814 /* Insn: add -4 * num_regs_to_save, sp. */
815 this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
816 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
817 this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
818 if (size)
820 /* Insn: add -size, sp. */
821 this_strategy_size += SIZE_ADD_SP (-size);
824 if (this_strategy_size < strategy_size)
826 strategy = save_sp_no_merge;
827 strategy_size = this_strategy_size;
830 /* Consider alternative save_sp_partial_merge only if we don't
831 need a frame pointer and size is reasonably large. */
832 if (! frame_pointer_needed && size + 4 * num_regs_to_save > 128)
834 /* Insn: add -128, sp. */
835 this_strategy_size = SIZE_ADD_SP (-128);
836 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
837 this_strategy_size += SIZE_FMOV_SP (128 - 4 * num_regs_to_save,
838 num_regs_to_save);
839 if (size)
841 /* Insn: add 128-size, sp. */
842 this_strategy_size += SIZE_ADD_SP (128 - size);
845 if (this_strategy_size < strategy_size)
847 strategy = save_sp_partial_merge;
848 strategy_size = this_strategy_size;
852 /* Consider alternative save_a0_merge only if we don't need a
853 frame pointer, size is nonzero and the user hasn't
854 changed the calling conventions of a0. */
855 if (! frame_pointer_needed && size
856 && call_really_used_regs [FIRST_ADDRESS_REGNUM]
857 && ! fixed_regs[FIRST_ADDRESS_REGNUM])
859 /* Insn: add -(size + 4 * num_regs_to_save), sp. */
860 this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
861 /* Insn: mov sp, a0. */
862 this_strategy_size++;
863 if (size)
865 /* Insn: add size, a0. */
866 this_strategy_size += SIZE_ADD_AX (size);
868 /* Insn: fmov fs#, (a0+), for each fs# to be saved. */
869 this_strategy_size += 3 * num_regs_to_save;
871 if (this_strategy_size < strategy_size)
873 strategy = save_a0_merge;
874 strategy_size = this_strategy_size;
878 /* Consider alternative save_a0_no_merge if the user hasn't
879 changed the calling conventions of a0. */
880 if (call_really_used_regs [FIRST_ADDRESS_REGNUM]
881 && ! fixed_regs[FIRST_ADDRESS_REGNUM])
883 /* Insn: add -4 * num_regs_to_save, sp. */
884 this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
885 /* Insn: mov sp, a0. */
886 this_strategy_size++;
887 /* Insn: fmov fs#, (a0+), for each fs# to be saved. */
888 this_strategy_size += 3 * num_regs_to_save;
889 if (size)
891 /* Insn: add -size, sp. */
892 this_strategy_size += SIZE_ADD_SP (-size);
895 if (this_strategy_size < strategy_size)
897 strategy = save_a0_no_merge;
898 strategy_size = this_strategy_size;
902 /* Emit the initial SP add, common to all strategies. */
903 switch (strategy)
905 case save_sp_no_merge:
906 case save_a0_no_merge:
907 F (emit_insn (gen_addsi3 (stack_pointer_rtx,
908 stack_pointer_rtx,
909 GEN_INT (-4 * num_regs_to_save))));
910 xsize = 0;
911 break;
913 case save_sp_partial_merge:
914 F (emit_insn (gen_addsi3 (stack_pointer_rtx,
915 stack_pointer_rtx,
916 GEN_INT (-128))));
917 xsize = 128 - 4 * num_regs_to_save;
918 size -= xsize;
919 break;
921 case save_sp_merge:
922 case save_a0_merge:
923 F (emit_insn (gen_addsi3 (stack_pointer_rtx,
924 stack_pointer_rtx,
925 GEN_INT (-(size + 4 * num_regs_to_save)))));
926 /* We'll have to adjust FP register saves according to the
927 frame size. */
928 xsize = size;
929 /* Since we've already created the stack frame, don't do it
930 again at the end of the function. */
931 size = 0;
932 break;
934 default:
935 gcc_unreachable ();
938 /* Now prepare register a0, if we have decided to use it. */
939 switch (strategy)
941 case save_sp_merge:
942 case save_sp_no_merge:
943 case save_sp_partial_merge:
944 reg = 0;
945 break;
947 case save_a0_merge:
948 case save_a0_no_merge:
949 reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM);
950 F (emit_insn (gen_movsi (reg, stack_pointer_rtx)));
951 if (xsize)
952 F (emit_insn (gen_addsi3 (reg, reg, GEN_INT (xsize))));
953 reg = gen_rtx_POST_INC (SImode, reg);
954 break;
956 default:
957 gcc_unreachable ();
960 /* Now actually save the FP registers. */
961 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
962 if (df_regs_ever_live_p (i) && ! call_really_used_regs [i])
964 rtx addr;
966 if (reg)
967 addr = reg;
968 else
970 /* If we aren't using `a0', use an SP offset. */
971 if (xsize)
973 addr = gen_rtx_PLUS (SImode,
974 stack_pointer_rtx,
975 GEN_INT (xsize));
977 else
978 addr = stack_pointer_rtx;
980 xsize += 4;
983 F (emit_insn (gen_movsf (gen_rtx_MEM (SFmode, addr),
984 gen_rtx_REG (SFmode, i))));
988 /* Now put the frame pointer into the frame pointer register. */
989 if (frame_pointer_needed)
990 F (emit_move_insn (frame_pointer_rtx, stack_pointer_rtx));
992 /* Allocate stack for this frame. */
993 if (size)
994 F (emit_insn (gen_addsi3 (stack_pointer_rtx,
995 stack_pointer_rtx,
996 GEN_INT (-size))));
998 if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
999 emit_insn (gen_GOTaddr2picreg ());
/* Emit the epilogue: restore live FP registers (on AM33/2.0) using the
   cheapest of several addressing strategies, deallocate the local
   frame (but never the integer register save area, which the ret/retf
   pattern releases atomically) and emit the return insn.  */
1002 void
1003 mn10300_expand_epilogue (void)
1005 HOST_WIDE_INT size;
1007 /* SIZE includes the fixed stack space needed for function calls. */
1008 size = get_frame_size () + crtl->outgoing_args_size;
1009 size += (crtl->outgoing_args_size ? 4 : 0);
1011 if (TARGET_AM33_2 && fp_regs_to_save ())
1013 int num_regs_to_save = fp_regs_to_save (), i;
1014 rtx reg = 0;
1016 /* We have several options to restore FP registers. We could
1017 load them from SP offsets, but, if there are enough FP
1018 registers to restore, we win if we use a post-increment
1019 addressing mode. */
1021 /* If we have a frame pointer, it's the best option, because we
1022 already know it has the value we want. */
1023 if (frame_pointer_needed)
1024 reg = gen_rtx_REG (SImode, FRAME_POINTER_REGNUM);
1025 /* Otherwise, we may use `a1', since it's call-clobbered and
1026 it's never used for return values. But only do so if it's
1027 smaller than using SP offsets. */
1028 else
1030 enum { restore_sp_post_adjust,
1031 restore_sp_pre_adjust,
1032 restore_sp_partial_adjust,
1033 restore_a1 } strategy;
/* restore_sp_post_adjust is evaluated unconditionally first, so
   STRATEGY is always assigned before the switch below.  */
1034 unsigned int this_strategy_size, strategy_size = (unsigned)-1;
1036 /* Consider using sp offsets before adjusting sp. */
1037 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1038 this_strategy_size = SIZE_FMOV_SP (size, num_regs_to_save);
1039 /* If size is too large, we'll have to adjust SP with an
1040 add. */
1041 if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
1043 /* Insn: add size + 4 * num_regs_to_save, sp. */
1044 this_strategy_size += SIZE_ADD_SP (size + 4 * num_regs_to_save);
1046 /* If we don't have to restore any non-FP registers,
1047 we'll be able to save one byte by using rets. */
1048 if (! REG_SAVE_BYTES)
1049 this_strategy_size--;
1051 if (this_strategy_size < strategy_size)
1053 strategy = restore_sp_post_adjust;
1054 strategy_size = this_strategy_size;
1057 /* Consider using sp offsets after adjusting sp. */
1058 /* Insn: add size, sp. */
1059 this_strategy_size = SIZE_ADD_SP (size);
1060 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1061 this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
1062 /* We're going to use ret to release the FP registers
1063 save area, so, no savings. */
1065 if (this_strategy_size < strategy_size)
1067 strategy = restore_sp_pre_adjust;
1068 strategy_size = this_strategy_size;
1071 /* Consider using sp offsets after partially adjusting sp.
1072 When size is close to 32Kb, we may be able to adjust SP
1073 with an imm16 add instruction while still using fmov
1074 (d8,sp). */
1075 if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
1077 /* Insn: add size + 4 * num_regs_to_save
1078 + REG_SAVE_BYTES - 252,sp. */
1079 this_strategy_size = SIZE_ADD_SP (size + 4 * num_regs_to_save
1080 + REG_SAVE_BYTES - 252);
1081 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1082 this_strategy_size += SIZE_FMOV_SP (252 - REG_SAVE_BYTES
1083 - 4 * num_regs_to_save,
1084 num_regs_to_save);
1085 /* We're going to use ret to release the FP registers
1086 save area, so, no savings. */
1088 if (this_strategy_size < strategy_size)
1090 strategy = restore_sp_partial_adjust;
1091 strategy_size = this_strategy_size;
1095 /* Consider using a1 in post-increment mode, as long as the
1096 user hasn't changed the calling conventions of a1. */
1097 if (call_really_used_regs [FIRST_ADDRESS_REGNUM + 1]
1098 && ! fixed_regs[FIRST_ADDRESS_REGNUM+1])
1100 /* Insn: mov sp,a1. */
1101 this_strategy_size = 1;
1102 if (size)
1104 /* Insn: add size,a1. */
1105 this_strategy_size += SIZE_ADD_AX (size);
1107 /* Insn: fmov (a1+),fs#, for each fs# to be restored. */
1108 this_strategy_size += 3 * num_regs_to_save;
1109 /* If size is large enough, we may be able to save a
1110 couple of bytes. */
1111 if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
1113 /* Insn: mov a1,sp. */
1114 this_strategy_size += 2;
1116 /* If we don't have to restore any non-FP registers,
1117 we'll be able to save one byte by using rets. */
1118 if (! REG_SAVE_BYTES)
1119 this_strategy_size--;
1121 if (this_strategy_size < strategy_size)
1123 strategy = restore_a1;
1124 strategy_size = this_strategy_size;
1128 switch (strategy)
1130 case restore_sp_post_adjust:
1131 break;
1133 case restore_sp_pre_adjust:
1134 emit_insn (gen_addsi3 (stack_pointer_rtx,
1135 stack_pointer_rtx,
1136 GEN_INT (size)));
1137 size = 0;
1138 break;
1140 case restore_sp_partial_adjust:
1141 emit_insn (gen_addsi3 (stack_pointer_rtx,
1142 stack_pointer_rtx,
1143 GEN_INT (size + 4 * num_regs_to_save
1144 + REG_SAVE_BYTES - 252)));
1145 size = 252 - REG_SAVE_BYTES - 4 * num_regs_to_save;
1146 break;
1148 case restore_a1:
1149 reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM + 1);
1150 emit_insn (gen_movsi (reg, stack_pointer_rtx));
1151 if (size)
1152 emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
1153 break;
1155 default:
1156 gcc_unreachable ();
1160 /* Adjust the selected register, if any, for post-increment. */
1161 if (reg)
1162 reg = gen_rtx_POST_INC (SImode, reg);
1164 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
1165 if (df_regs_ever_live_p (i) && ! call_really_used_regs [i])
1167 rtx addr;
1169 if (reg)
1170 addr = reg;
1171 else if (size)
1173 /* If we aren't using a post-increment register, use an
1174 SP offset. */
1175 addr = gen_rtx_PLUS (SImode,
1176 stack_pointer_rtx,
1177 GEN_INT (size));
1179 else
1180 addr = stack_pointer_rtx;
1182 size += 4;
1184 emit_insn (gen_movsf (gen_rtx_REG (SFmode, i),
1185 gen_rtx_MEM (SFmode, addr)));
1188 /* If we were using the restore_a1 strategy and the number of
1189 bytes to be released won't fit in the `ret' byte, copy `a1'
1190 to `sp', to avoid having to use `add' to adjust it. */
1191 if (! frame_pointer_needed && reg && size + REG_SAVE_BYTES > 255)
1193 emit_move_insn (stack_pointer_rtx, XEXP (reg, 0));
1194 size = 0;
1198 /* Maybe cut back the stack, except for the register save area.
1200 If the frame pointer exists, then use the frame pointer to
1201 cut back the stack.
1203 If the stack size + register save area is more than 255 bytes,
1204 then the stack must be cut back here since the size + register
1205 save size is too big for a ret/retf instruction.
1207 Else leave it alone, it will be cut back as part of the
1208 ret/retf instruction, or there wasn't any stack to begin with.
1210 Under no circumstances should the register save area be
1211 deallocated here, that would leave a window where an interrupt
1212 could occur and trash the register save area. */
1213 if (frame_pointer_needed)
1215 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1216 size = 0;
1218 else if (size + REG_SAVE_BYTES > 255)
1220 emit_insn (gen_addsi3 (stack_pointer_rtx,
1221 stack_pointer_rtx,
1222 GEN_INT (size)));
1223 size = 0;
1226 /* Adjust the stack and restore callee-saved registers, if any. */
1227 if (size || df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1228 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1229 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1230 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1231 || frame_pointer_needed)
1232 emit_jump_insn (gen_return_internal_regs
1233 (GEN_INT (size + REG_SAVE_BYTES)))
1234 else
1235 emit_jump_insn (gen_return_internal ());
/* Recognize the PARALLEL rtx generated by mn10300_gen_multiple_store().
   This function is for MATCH_PARALLEL and so assumes OP is known to be
   parallel.  If OP is a multiple store, return a mask indicating which
   registers it saves.  Return 0 otherwise.  */

int
mn10300_store_multiple_operation (rtx op,
                                  enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count;
  int mask;
  int i;
  unsigned int last;
  rtx elt;

  /* A multiple store is one stack adjustment plus at least one push.  */
  count = XVECLEN (op, 0);
  if (count < 2)
    return 0;

  /* Check that first instruction has the form (set (sp) (plus A B)) */
  elt = XVECEXP (op, 0, 0);
  if (GET_CODE (elt) != SET
      || (! REG_P (SET_DEST (elt)))
      || REGNO (SET_DEST (elt)) != STACK_POINTER_REGNUM
      || GET_CODE (SET_SRC (elt)) != PLUS)
    return 0;

  /* Check that A is the stack pointer and B is the expected stack size.
     For OP to match, each subsequent instruction should push a word onto
     the stack.  We therefore expect the first instruction to create
     COUNT-1 stack slots.  */
  elt = SET_SRC (elt);
  if ((! REG_P (XEXP (elt, 0)))
      || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
      || (! CONST_INT_P (XEXP (elt, 1)))
      || INTVAL (XEXP (elt, 1)) != -(count - 1) * 4)
    return 0;

  /* Now go through the rest of the vector elements.  They must be
     ordered so that the first instruction stores the highest-numbered
     register to the highest stack slot and that subsequent instructions
     store a lower-numbered register to the slot below.

     LAST keeps track of the smallest-numbered register stored so far.
     MASK is the set of stored registers.  */
  last = LAST_EXTENDED_REGNUM + 1;
  mask = 0;
  for (i = 1; i < count; i++)
    {
      /* Check that element i is a (set (mem M) R) and that R is valid.  */
      elt = XVECEXP (op, 0, i);
      if (GET_CODE (elt) != SET
          || (! MEM_P (SET_DEST (elt)))
          || (! REG_P (SET_SRC (elt)))
          || REGNO (SET_SRC (elt)) >= last)
        return 0;

      /* R was OK, so provisionally add it to MASK.  We return 0 in any
         case if the rest of the instruction has a flaw.  */
      last = REGNO (SET_SRC (elt));
      mask |= (1 << last);

      /* Check that M has the form (plus (sp) (const_int -I*4)) */
      elt = XEXP (SET_DEST (elt), 0);
      if (GET_CODE (elt) != PLUS
          || (! REG_P (XEXP (elt, 0)))
          || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
          || (! CONST_INT_P (XEXP (elt, 1)))
          || INTVAL (XEXP (elt, 1)) != -i * 4)
        return 0;
    }

  /* All or none of the callee-saved extended registers must be in the set.  */
  if ((mask & 0x3c000) != 0
      && (mask & 0x3c000) != 0x3c000)
    return 0;

  return mask;
}
1318 /* Implement TARGET_PREFERRED_RELOAD_CLASS. */
1320 static reg_class_t
1321 mn10300_preferred_reload_class (rtx x, reg_class_t rclass)
1323 if (x == stack_pointer_rtx && rclass != SP_REGS)
1324 return ADDRESS_OR_EXTENDED_REGS;
1325 else if (MEM_P (x)
1326 || (REG_P (x)
1327 && !HARD_REGISTER_P (x))
1328 || (GET_CODE (x) == SUBREG
1329 && REG_P (SUBREG_REG (x))
1330 && !HARD_REGISTER_P (SUBREG_REG (x))))
1331 return LIMIT_RELOAD_CLASS (GET_MODE (x), rclass);
1332 else
1333 return rclass;
1336 /* Implement TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
1338 static reg_class_t
1339 mn10300_preferred_output_reload_class (rtx x, reg_class_t rclass)
1341 if (x == stack_pointer_rtx && rclass != SP_REGS)
1342 return ADDRESS_OR_EXTENDED_REGS;
1344 return rclass;
/* What (if any) secondary registers are needed to move IN with mode
   MODE into a register in register class RCLASS.

   We might be able to simplify this.  */

enum reg_class
mn10300_secondary_reload_class (enum reg_class rclass, enum machine_mode mode,
                                rtx in)
{
  rtx inner = in;

  /* Strip off any SUBREG expressions from IN.  Basically we want
     to know if IN is a pseudo or (subreg (pseudo)) as those can
     turn into MEMs during reload.  */
  while (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  /* Memory loads less than a full word wide can't have an
     address or stack pointer destination.  They must use
     a data register as an intermediate register.  */
  if ((MEM_P (in)
       || (REG_P (inner)
           && REGNO (inner) >= FIRST_PSEUDO_REGISTER))
      && (mode == QImode || mode == HImode)
      && (rclass == ADDRESS_REGS || rclass == SP_REGS
          || rclass == SP_OR_ADDRESS_REGS))
    {
      /* On the AM33 the extended registers are usable too.  */
      if (TARGET_AM33)
        return DATA_OR_EXTENDED_REGS;
      return DATA_REGS;
    }

  /* We can't directly load sp + const_int into a data register;
     we must use an address register as an intermediate.  */
  if (rclass != SP_REGS
      && rclass != ADDRESS_REGS
      && rclass != SP_OR_ADDRESS_REGS
      && rclass != SP_OR_EXTENDED_REGS
      && rclass != ADDRESS_OR_EXTENDED_REGS
      && rclass != SP_OR_ADDRESS_OR_EXTENDED_REGS
      && (in == stack_pointer_rtx
          || (GET_CODE (in) == PLUS
              && (XEXP (in, 0) == stack_pointer_rtx
                  || XEXP (in, 1) == stack_pointer_rtx))))
    return ADDRESS_REGS;

  if (TARGET_AM33_2
      && rclass == FP_REGS)
    {
      /* We can't load directly into an FP register from a
         constant address.  */
      if (MEM_P (in)
          && CONSTANT_ADDRESS_P (XEXP (in, 0)))
        return DATA_OR_EXTENDED_REGS;

      /* Handle case where a pseudo may not get a hard register
         but has an equivalent memory location defined.  */
      if (REG_P (inner)
          && REGNO (inner) >= FIRST_PSEUDO_REGISTER
          && reg_equiv_mem [REGNO (inner)]
          && CONSTANT_ADDRESS_P (XEXP (reg_equiv_mem [REGNO (inner)], 0)))
        return DATA_OR_EXTENDED_REGS;
    }

  /* Otherwise assume no secondary reloads are needed.  */
  return NO_REGS;
}
1416 mn10300_initial_offset (int from, int to)
1418 /* The difference between the argument pointer and the frame pointer
1419 is the size of the callee register save area. */
1420 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
1422 if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1423 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1424 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1425 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1426 || fp_regs_to_save ()
1427 || frame_pointer_needed)
1428 return REG_SAVE_BYTES
1429 + 4 * fp_regs_to_save ();
1430 else
1431 return 0;
1434 /* The difference between the argument pointer and the stack pointer is
1435 the sum of the size of this function's frame, the callee register save
1436 area, and the fixed stack space needed for function calls (if any). */
1437 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1439 if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1440 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1441 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1442 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1443 || fp_regs_to_save ()
1444 || frame_pointer_needed)
1445 return (get_frame_size () + REG_SAVE_BYTES
1446 + 4 * fp_regs_to_save ()
1447 + (crtl->outgoing_args_size
1448 ? crtl->outgoing_args_size + 4 : 0));
1449 else
1450 return (get_frame_size ()
1451 + (crtl->outgoing_args_size
1452 ? crtl->outgoing_args_size + 4 : 0));
1455 /* The difference between the frame pointer and stack pointer is the sum
1456 of the size of this function's frame and the fixed stack space needed
1457 for function calls (if any). */
1458 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1459 return (get_frame_size ()
1460 + (crtl->outgoing_args_size
1461 ? crtl->outgoing_args_size + 4 : 0));
1463 gcc_unreachable ();
1466 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1468 static bool
1469 mn10300_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1471 /* Return values > 8 bytes in length in memory. */
1472 return (int_size_in_bytes (type) > 8
1473 || int_size_in_bytes (type) == 0
1474 || TYPE_MODE (type) == BLKmode);
/* Flush the argument registers to the stack for a stdarg function;
   return the new argument pointer.  */

static rtx
mn10300_builtin_saveregs (void)
{
  rtx offset, mem;
  tree fntype = TREE_TYPE (current_function_decl);
  /* For non-prototyped varargs, step back one word so the returned
     pointer also covers the last named argument.  */
  int argadj = ((!stdarg_p (fntype))
                ? UNITS_PER_WORD : 0);
  alias_set_type set = get_varargs_alias_set ();

  if (argadj)
    offset = plus_constant (crtl->args.arg_offset_rtx, argadj);
  else
    offset = crtl->args.arg_offset_rtx;

  /* Spill the two argument registers (hard regs 0 and 1) into the
     first two words of the incoming argument area.  */
  mem = gen_rtx_MEM (SImode, crtl->args.internal_arg_pointer);
  set_mem_alias_set (mem, set);
  emit_move_insn (mem, gen_rtx_REG (SImode, 0));

  mem = gen_rtx_MEM (SImode,
                     plus_constant (crtl->args.internal_arg_pointer, 4));
  set_mem_alias_set (mem, set);
  emit_move_insn (mem, gen_rtx_REG (SImode, 1));

  /* The new argument pointer is internal_arg_pointer + OFFSET.  */
  return copy_to_reg (expand_binop (Pmode, add_optab,
                                    crtl->args.internal_arg_pointer,
                                    offset, 0, 0, OPTAB_LIB_WIDEN));
}
/* Implement va_start: flush the argument registers to the stack via
   expand_builtin_saveregs and begin the va_list at the pointer that
   produces, ignoring the NEXTARG supplied by the generic code.  */

static void
mn10300_va_start (tree valist, rtx nextarg)
{
  nextarg = expand_builtin_saveregs ();
  std_expand_builtin_va_start (valist, nextarg);
}
1514 /* Return true when a parameter should be passed by reference. */
1516 static bool
1517 mn10300_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1518 enum machine_mode mode, const_tree type,
1519 bool named ATTRIBUTE_UNUSED)
1521 unsigned HOST_WIDE_INT size;
1523 if (type)
1524 size = int_size_in_bytes (type);
1525 else
1526 size = GET_MODE_SIZE (mode);
1528 return (size > 8 || size == 0);
1531 /* Return an RTX to represent where a value with mode MODE will be returned
1532 from a function. If the result is NULL_RTX, the argument is pushed. */
1534 static rtx
1535 mn10300_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1536 const_tree type, bool named ATTRIBUTE_UNUSED)
1538 rtx result = NULL_RTX;
1539 int size;
1541 /* We only support using 2 data registers as argument registers. */
1542 int nregs = 2;
1544 /* Figure out the size of the object to be passed. */
1545 if (mode == BLKmode)
1546 size = int_size_in_bytes (type);
1547 else
1548 size = GET_MODE_SIZE (mode);
1550 cum->nbytes = (cum->nbytes + 3) & ~3;
1552 /* Don't pass this arg via a register if all the argument registers
1553 are used up. */
1554 if (cum->nbytes > nregs * UNITS_PER_WORD)
1555 return result;
1557 /* Don't pass this arg via a register if it would be split between
1558 registers and memory. */
1559 if (type == NULL_TREE
1560 && cum->nbytes + size > nregs * UNITS_PER_WORD)
1561 return result;
1563 switch (cum->nbytes / UNITS_PER_WORD)
1565 case 0:
1566 result = gen_rtx_REG (mode, FIRST_ARGUMENT_REGNUM);
1567 break;
1568 case 1:
1569 result = gen_rtx_REG (mode, FIRST_ARGUMENT_REGNUM + 1);
1570 break;
1571 default:
1572 break;
1575 return result;
1578 /* Update the data in CUM to advance over an argument
1579 of mode MODE and data type TYPE.
1580 (TYPE is null for libcalls where that information may not be available.) */
1582 static void
1583 mn10300_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1584 const_tree type, bool named ATTRIBUTE_UNUSED)
1586 cum->nbytes += (mode != BLKmode
1587 ? (GET_MODE_SIZE (mode) + 3) & ~3
1588 : (int_size_in_bytes (type) + 3) & ~3);
1591 /* Return the number of bytes of registers to use for an argument passed
1592 partially in registers and partially in memory. */
1594 static int
1595 mn10300_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1596 tree type, bool named ATTRIBUTE_UNUSED)
1598 int size;
1600 /* We only support using 2 data registers as argument registers. */
1601 int nregs = 2;
1603 /* Figure out the size of the object to be passed. */
1604 if (mode == BLKmode)
1605 size = int_size_in_bytes (type);
1606 else
1607 size = GET_MODE_SIZE (mode);
1609 cum->nbytes = (cum->nbytes + 3) & ~3;
1611 /* Don't pass this arg via a register if all the argument registers
1612 are used up. */
1613 if (cum->nbytes > nregs * UNITS_PER_WORD)
1614 return 0;
1616 if (cum->nbytes + size <= nregs * UNITS_PER_WORD)
1617 return 0;
1619 /* Don't pass this arg via a register if it would be split between
1620 registers and memory. */
1621 if (type == NULL_TREE
1622 && cum->nbytes + size > nregs * UNITS_PER_WORD)
1623 return 0;
1625 return nregs * UNITS_PER_WORD - cum->nbytes;
1628 /* Return the location of the function's value. This will be either
1629 $d0 for integer functions, $a0 for pointers, or a PARALLEL of both
1630 $d0 and $a0 if the -mreturn-pointer-on-do flag is set. Note that
1631 we only return the PARALLEL for outgoing values; we do not want
1632 callers relying on this extra copy. */
1634 static rtx
1635 mn10300_function_value (const_tree valtype,
1636 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1637 bool outgoing)
1639 rtx rv;
1640 enum machine_mode mode = TYPE_MODE (valtype);
1642 if (! POINTER_TYPE_P (valtype))
1643 return gen_rtx_REG (mode, FIRST_DATA_REGNUM);
1644 else if (! TARGET_PTR_A0D0 || ! outgoing
1645 || cfun->returns_struct)
1646 return gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM);
1648 rv = gen_rtx_PARALLEL (mode, rtvec_alloc (2));
1649 XVECEXP (rv, 0, 0)
1650 = gen_rtx_EXPR_LIST (VOIDmode,
1651 gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM),
1652 GEN_INT (0));
1654 XVECEXP (rv, 0, 1)
1655 = gen_rtx_EXPR_LIST (VOIDmode,
1656 gen_rtx_REG (mode, FIRST_DATA_REGNUM),
1657 GEN_INT (0));
1658 return rv;
/* Implements TARGET_LIBCALL_VALUE.  */

static rtx
mn10300_libcall_value (enum machine_mode mode,
                       const_rtx fun ATTRIBUTE_UNUSED)
{
  /* Library call results always come back in the first data register.  */
  return gen_rtx_REG (mode, FIRST_DATA_REGNUM);
}
1670 /* Implements FUNCTION_VALUE_REGNO_P. */
1672 bool
1673 mn10300_function_value_regno_p (const unsigned int regno)
1675 return (regno == FIRST_DATA_REGNUM || regno == FIRST_ADDRESS_REGNUM);
/* Output a compare insn.  */

const char *
mn10300_output_cmp (rtx operand, rtx insn)
{
  rtx temp;
  int past_call = 0;

  /* We can save a byte if we can find a register which has the value
     zero in it.  */
  temp = PREV_INSN (insn);
  while (optimize && temp)
    {
      rtx set;

      /* We allow the search to go through call insns.  We record
         the fact that we've past a CALL_INSN and reject matches which
         use call clobbered registers.  */
      if (LABEL_P (temp)
          || JUMP_P (temp)
          || GET_CODE (temp) == BARRIER)
        break;

      if (CALL_P (temp))
        past_call = 1;

      /* Notes carry no sets; keep scanning backwards.  */
      if (GET_CODE (temp) == NOTE)
        {
          temp = PREV_INSN (temp);
          continue;
        }

      /* It must be an insn, see if it is a simple set.  */
      set = single_set (temp);
      if (!set)
        {
          temp = PREV_INSN (temp);
          continue;
        }

      /* Are we setting a data register to zero (this does not win for
         address registers)?

         If it's a call clobbered register, have we past a call?

         Make sure the register we find isn't the same as ourself;
         the mn10300 can't encode that.

         ??? reg_set_between_p return nonzero anytime we pass a CALL_INSN
         so the code to detect calls here isn't doing anything useful.  */
      if (REG_P (SET_DEST (set))
          && SET_SRC (set) == CONST0_RTX (GET_MODE (SET_DEST (set)))
          && !reg_set_between_p (SET_DEST (set), temp, insn)
          && (REGNO_REG_CLASS (REGNO (SET_DEST (set)))
              == REGNO_REG_CLASS (REGNO (operand)))
          && REGNO_REG_CLASS (REGNO (SET_DEST (set))) != EXTENDED_REGS
          && REGNO (SET_DEST (set)) != REGNO (operand)
          && (!past_call
              || ! call_really_used_regs [REGNO (SET_DEST (set))]))
        {
          rtx xoperands[2];
          xoperands[0] = operand;
          xoperands[1] = SET_DEST (set);

          output_asm_insn ("cmp %1,%0", xoperands);
          return "";
        }

      /* NOTE(review): this condition can never be true -- it requires
         the destination's class both to differ from OPERAND's class
         (which is EXTENDED_REGS here) and to equal EXTENDED_REGS.
         Looks like dead code; confirm the intent before removing.  */
      if (REGNO_REG_CLASS (REGNO (operand)) == EXTENDED_REGS
          && REG_P (SET_DEST (set))
          && SET_SRC (set) == CONST0_RTX (GET_MODE (SET_DEST (set)))
          && !reg_set_between_p (SET_DEST (set), temp, insn)
          && (REGNO_REG_CLASS (REGNO (SET_DEST (set)))
              != REGNO_REG_CLASS (REGNO (operand)))
          && REGNO_REG_CLASS (REGNO (SET_DEST (set))) == EXTENDED_REGS
          && REGNO (SET_DEST (set)) != REGNO (operand)
          && (!past_call
              || ! call_really_used_regs [REGNO (SET_DEST (set))]))
        {
          rtx xoperands[2];
          xoperands[0] = operand;
          xoperands[1] = SET_DEST (set);

          output_asm_insn ("cmp %1,%0", xoperands);
          return "";
        }

      temp = PREV_INSN (temp);
    }

  /* No zero-valued register found; compare against an immediate zero.  */
  return "cmp 0,%0";
}
/* Similarly, but when using a zero_extract pattern for a btst where
   the source operand might end up in memory.  LEN is the field width
   in bits and BIT its starting position; return nonzero when all the
   mask bits fall within a single byte, which is what a memory btst
   can test.  */

int
mn10300_mask_ok_for_mem_btst (int len, int bit)
{
  unsigned int mask = 0;

  while (len > 0)
    {
      /* Use an unsigned constant: "1 << 31" would be signed-overflow
         undefined behaviour.  */
      mask |= (1U << bit);
      bit++;
      len--;
    }

  /* MASK must fit into an 8bit value.  */
  return (((mask & 0xff) == mask)
          || ((mask & 0xff00) == mask)
          || ((mask & 0xff0000) == mask)
          || ((mask & 0xff000000) == mask));
}
1790 /* Return 1 if X contains a symbolic expression. We know these
1791 expressions will have one of a few well defined forms, so
1792 we need only check those forms. */
1795 mn10300_symbolic_operand (rtx op,
1796 enum machine_mode mode ATTRIBUTE_UNUSED)
1798 switch (GET_CODE (op))
1800 case SYMBOL_REF:
1801 case LABEL_REF:
1802 return 1;
1803 case CONST:
1804 op = XEXP (op, 0);
1805 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1806 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1807 && CONST_INT_P (XEXP (op, 1)));
1808 default:
1809 return 0;
/* Try machine dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new valid address.
   This macro is used in only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   Normally it is always safe for this macro to do nothing.  It exists to
   recognize opportunities to optimize the output.

   But on a few ports with segmented architectures and indexed addressing
   (mn10300, hppa) it is used to rewrite certain problematical addresses.  */

static rtx
mn10300_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
                            enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* NOTE(review): X is the operand tested, but OLDX is what gets
     legitimized here -- looks suspicious; confirm this asymmetry is
     intentional.  */
  if (flag_pic && ! mn10300_legitimate_pic_operand_p (x))
    x = mn10300_legitimize_pic_address (oldx, NULL_RTX);

  /* Uh-oh.  We might have an address for x[n-100000].  This needs
     special handling to avoid creating an indexed memory address
     with x-100000 as the base.  */
  if (GET_CODE (x) == PLUS
      && mn10300_symbolic_operand (XEXP (x, 1), VOIDmode))
    {
      /* Ugly.  We modify things here so that the address offset specified
         by the index expression is computed first, then added to x to form
         the entire address.  */

      rtx regx1, regy1, regy2, y;

      /* Strip off any CONST.  */
      y = XEXP (x, 1);
      if (GET_CODE (y) == CONST)
        y = XEXP (y, 0);

      if (GET_CODE (y) == PLUS || GET_CODE (y) == MINUS)
        {
          regx1 = force_reg (Pmode, force_operand (XEXP (x, 0), 0));
          regy1 = force_reg (Pmode, force_operand (XEXP (y, 0), 0));
          regy2 = force_reg (Pmode, force_operand (XEXP (y, 1), 0));
          regx1 = force_reg (Pmode,
                             gen_rtx_fmt_ee (GET_CODE (y), Pmode, regx1,
                                             regy2));
          return force_reg (Pmode, gen_rtx_PLUS (Pmode, regx1, regy1));
        }
    }
  return x;
}
1864 /* Convert a non-PIC address in `orig' to a PIC address using @GOT or
1865 @GOTOFF in `reg'. */
1868 mn10300_legitimize_pic_address (rtx orig, rtx reg)
1870 if (GET_CODE (orig) == LABEL_REF
1871 || (GET_CODE (orig) == SYMBOL_REF
1872 && (CONSTANT_POOL_ADDRESS_P (orig)
1873 || ! MN10300_GLOBAL_P (orig))))
1875 if (reg == 0)
1876 reg = gen_reg_rtx (Pmode);
1878 emit_insn (gen_symGOTOFF2reg (reg, orig));
1879 return reg;
1881 else if (GET_CODE (orig) == SYMBOL_REF)
1883 if (reg == 0)
1884 reg = gen_reg_rtx (Pmode);
1886 emit_insn (gen_symGOT2reg (reg, orig));
1887 return reg;
1889 return orig;
/* Return zero if X references a SYMBOL_REF or LABEL_REF whose symbol
   isn't protected by a PIC unspec; nonzero otherwise.  */

int
mn10300_legitimate_pic_operand_p (rtx x)
{
  const char *fmt;
  int i;

  /* A bare symbol or label reference is not legitimate under PIC.  */
  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return 0;

  /* A PIC-related unspec shields whatever it wraps.  */
  if (GET_CODE (x) == UNSPEC
      && (XINT (x, 1) == UNSPEC_PIC
          || XINT (x, 1) == UNSPEC_GOT
          || XINT (x, 1) == UNSPEC_GOTOFF
          || XINT (x, 1) == UNSPEC_PLT
          || XINT (x, 1) == UNSPEC_GOTSYM_OFF))
    return 1;

  /* Otherwise recurse into every sub-expression and vector element.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (! mn10300_legitimate_pic_operand_p (XVECEXP (x, i, j)))
              return 0;
        }
      else if (fmt[i] == 'e'
               && ! mn10300_legitimate_pic_operand_p (XEXP (x, i)))
        return 0;
    }

  return 1;
}
/* Return TRUE if the address X, taken from a (MEM:MODE X) rtx, is
   legitimate, and FALSE otherwise.

   On the mn10300, the value in the address register must be
   in the same memory space/segment as the effective address.

   This is problematical for reload since it does not understand
   that base+index != index+base in a memory reference.

   Note it is still possible to use reg+reg addressing modes,
   it's just much more difficult.  For a discussion of a possible
   workaround and solution, see the comments in pa.c before the
   function record_unscaled_index_insn_codes.  */

static bool
mn10300_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  /* Constant addresses are fine, provided they survive the PIC rules.  */
  if (CONSTANT_ADDRESS_P (x)
      && (! flag_pic || mn10300_legitimate_pic_operand_p (x)))
    return TRUE;

  /* A plain base register.  */
  if (RTX_OK_FOR_BASE_P (x, strict))
    return TRUE;

  /* AM33 post-increment, for word-sized (and SF/HI) accesses only.  */
  if (TARGET_AM33
      && GET_CODE (x) == POST_INC
      && RTX_OK_FOR_BASE_P (XEXP (x, 0), strict)
      && (mode == SImode || mode == SFmode || mode == HImode))
    return TRUE;

  if (GET_CODE (x) == PLUS)
    {
      rtx base = 0, index = 0;

      /* Try each operand as the base in turn; the other becomes the
         index.  */
      if (REG_P (XEXP (x, 0))
          && REGNO_STRICT_OK_FOR_BASE_P (REGNO (XEXP (x, 0)), strict))
        {
          base = XEXP (x, 0);
          index = XEXP (x, 1);
        }

      if (REG_P (XEXP (x, 1))
          && REGNO_STRICT_OK_FOR_BASE_P (REGNO (XEXP (x, 1)), strict))
        {
          base = XEXP (x, 1);
          index = XEXP (x, 0);
        }

      if (base != 0 && index != 0)
        {
          /* base + constant displacement is always acceptable.  */
          if (CONST_INT_P (index))
            return TRUE;
          /* base + a simple (non-PLUS) symbolic CONST, subject to the
             PIC restrictions.  */
          if (GET_CODE (index) == CONST
              && GET_CODE (XEXP (index, 0)) != PLUS
              && (! flag_pic
                  || (mn10300_legitimate_pic_operand_p (index)
                      && GET_MODE_SIZE (mode) == 4)))
            return TRUE;
        }
    }

  return FALSE;
}
/* Used by LEGITIMATE_CONSTANT_P().  Returns TRUE if X is a valid
   constant.  Note that some "constants" aren't valid, such as TLS
   symbols and unconverted GOT-based references, so we eliminate
   those here.  */

bool
mn10300_legitimate_constant_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST:
      x = XEXP (x, 0);

      /* Peel off a (plus X const_int) wrapper, if any.  */
      if (GET_CODE (x) == PLUS)
        {
          if (! CONST_INT_P (XEXP (x, 1)))
            return false;
          x = XEXP (x, 0);
        }

      /* Only some unspecs are valid as "constants".  */
      if (GET_CODE (x) == UNSPEC)
        {
          switch (XINT (x, 1))
            {
            case UNSPEC_INT_LABEL:
            case UNSPEC_PIC:
            case UNSPEC_GOT:
            case UNSPEC_GOTOFF:
            case UNSPEC_PLT:
              return true;
            default:
              return false;
            }
        }

      /* We must have drilled down to a symbol.  */
      if (! mn10300_symbolic_operand (x, Pmode))
        return false;
      break;

    default:
      break;
    }

  /* Everything else (CONST_INT, CONST_DOUBLE, plain symbols, ...) is
     acceptable.  */
  return true;
}
/* Recursive worker for mn10300_address_cost: return a relative cost
   for address sub-expression X.  *UNSIG is set to 1 once a component
   (the stack pointer or a ZERO_EXTEND) forces subsequent constant
   displacements to be treated as unsigned.  */

static int
mn10300_address_cost_1 (rtx x, int *unsig)
{
  switch (GET_CODE (x))
    {
    case REG:
      /* Cost by register class: sp is free (but unsigned offsets),
         address regs are cheapest, data/extended/FP cost more.  */
      switch (REGNO_REG_CLASS (REGNO (x)))
        {
        case SP_REGS:
          *unsig = 1;
          return 0;

        case ADDRESS_REGS:
          return 1;

        case DATA_REGS:
        case EXTENDED_REGS:
        case FP_REGS:
          return 3;

        case NO_REGS:
          return 5;

        default:
          gcc_unreachable ();
        }

    case PLUS:
    case MINUS:
    case ASHIFT:
    case AND:
    case IOR:
      /* Composite addresses cost the sum of their operands.  */
      return (mn10300_address_cost_1 (XEXP (x, 0), unsig)
              + mn10300_address_cost_1 (XEXP (x, 1), unsig));

    case EXPR_LIST:
    case SUBREG:
    case MEM:
      /* Restart costing on the inner expression with a fresh
         signedness flag.  */
      return mn10300_address_cost (XEXP (x, 0), !optimize_size);

    case ZERO_EXTEND:
      *unsig = 1;
      return mn10300_address_cost_1 (XEXP (x, 0), unsig);

    case CONST_INT:
      /* Displacement cost grows with the encoding width needed:
         0 / 8-bit / 16-bit / 24-bit / wider.  The 0x80-style bias
         accounts for the signed range when *UNSIG is clear.  */
      if (INTVAL (x) == 0)
        return 0;
      if (INTVAL (x) + (*unsig ? 0 : 0x80) < 0x100)
        return 1;
      if (INTVAL (x) + (*unsig ? 0 : 0x8000) < 0x10000)
        return 3;
      if (INTVAL (x) + (*unsig ? 0 : 0x800000) < 0x1000000)
        return 5;
      return 7;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Symbolic addresses are the most expensive.  */
      return 8;

    default:
      gcc_unreachable ();
    }
}
2109 static int
2110 mn10300_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
2112 int s = 0;
2113 return mn10300_address_cost_1 (x, &s);
/* Implement TARGET_RTX_COSTS: set *TOTAL to the relative cost of X
   appearing inside OUTER_CODE, and return true when *TOTAL is final
   (false lets the generic code cost the sub-expressions).  */

static bool
mn10300_rtx_costs (rtx x, int code, int outer_code, int *total,
                   bool speed ATTRIBUTE_UNUSED)
{
  switch (code)
    {
    case CONST_INT:
      /* Zeros are extremely cheap.  */
      if (INTVAL (x) == 0 && (outer_code == SET || outer_code == COMPARE))
        *total = 0;
      /* If it fits in 8 bits, then it's still relatively cheap.  */
      else if (INT_8_BITS (INTVAL (x)))
        *total = 1;
      /* This is the "base" cost, includes constants where either the
         upper or lower 16bits are all zeros.  */
      else if (INT_16_BITS (INTVAL (x))
               || (INTVAL (x) & 0xffff) == 0
               || (INTVAL (x) & 0xffff0000) == 0)
        *total = 2;
      else
        *total = 4;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      /* These are more costly than a CONST_INT, but we can relax them,
         so they're less costly than a CONST_DOUBLE.  */
      *total = 6;
      return true;

    case CONST_DOUBLE:
      /* We don't optimize CONST_DOUBLEs well nor do we relax them well,
         so their cost is very high.  */
      *total = 8;
      return true;

    case ZERO_EXTRACT:
      /* This is cheap, we can use btst.  */
      if (outer_code == COMPARE)
        *total = 0;
      return false;

    /* ??? This probably needs more work.  */
    case MOD:
    case DIV:
    case MULT:
      *total = 8;
      return true;

    default:
      return false;
    }
}
/* Check whether a constant used to initialize a DImode or DFmode can
   use a clr instruction.  The code here must be kept in sync with
   movdf and movdi.  */

bool
mn10300_wide_const_load_uses_clr (rtx operands[2])
{
  long val[2] = {0, 0};

  /* Only data registers can be cleared with clr.  */
  if ((! REG_P (operands[0]))
      || REGNO_REG_CLASS (REGNO (operands[0])) != DATA_REGS)
    return false;

  /* Split the 64-bit constant into its two 32-bit target words.  */
  switch (GET_CODE (operands[1]))
    {
    case CONST_INT:
      {
        rtx low, high;
        split_double (operands[1], &low, &high);
        val[0] = INTVAL (low);
        val[1] = INTVAL (high);
      }
      break;

    case CONST_DOUBLE:
      if (GET_MODE (operands[1]) == DFmode)
        {
          REAL_VALUE_TYPE rv;

          REAL_VALUE_FROM_CONST_DOUBLE (rv, operands[1]);
          REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
        }
      else if (GET_MODE (operands[1]) == VOIDmode
               || GET_MODE (operands[1]) == DImode)
        {
          val[0] = CONST_DOUBLE_LOW (operands[1]);
          val[1] = CONST_DOUBLE_HIGH (operands[1]);
        }
      break;

    default:
      return false;
    }

  /* clr is usable when either half of the constant is zero.  */
  return val[0] == 0 || val[1] == 0;
}
2217 /* If using PIC, mark a SYMBOL_REF for a non-global symbol so that we
2218 may access it using GOTOFF instead of GOT. */
2220 static void
2221 mn10300_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED)
2223 rtx symbol;
2225 if (! MEM_P (rtl))
2226 return;
2227 symbol = XEXP (rtl, 0);
2228 if (GET_CODE (symbol) != SYMBOL_REF)
2229 return;
2231 if (flag_pic)
2232 SYMBOL_REF_FLAG (symbol) = (*targetm.binds_local_p) (decl);
/* Dispatch tables on the mn10300 are extremely expensive in terms of code
   and readonly data size.  So we crank up the case threshold value to
   encourage a series of if/else comparisons to implement many small switch
   statements.  In theory, this value could be increased much more if we
   were solely optimizing for space, but we keep it "reasonable" to avoid
   serious code efficiency lossage.  */

static unsigned int
mn10300_case_values_threshold (void)
{
  return 6;
}
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.  Emit the fixed
   trampoline code followed by two zero words, which trampoline_init
   later patches with the static chain and the target address.  */

static void
mn10300_asm_trampoline_template (FILE *f)
{
  static const char * const tmpl[] =
  {
    "\tadd -4,sp\n",
    "\t.long 0x0004fffa\n",
    "\tmov (0,sp),a0\n",
    "\tadd 4,sp\n",
    "\tmov (13,a0),a1\n",
    "\tmov (17,a0),a0\n",
    "\tjmp (a0)\n",
    "\t.long 0\n",
    "\t.long 0\n"
  };
  size_t i;

  for (i = 0; i < sizeof (tmpl) / sizeof (tmpl[0]); i++)
    fputs (tmpl[i], f);
}
/* Worker function for TARGET_TRAMPOLINE_INIT.  */

static void
mn10300_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
  rtx mem;

  /* Copy the code emitted by mn10300_asm_trampoline_template into
     the trampoline area.  */
  emit_block_move (m_tramp, assemble_trampoline_template (),
                   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  /* Store the static chain at offset 0x14 and the target function's
     address at 0x18 -- presumably the two ".long 0" placeholders at
     the end of the template; confirm the offsets against it.  */
  mem = adjust_address (m_tramp, SImode, 0x14);
  emit_move_insn (mem, chain_value);
  mem = adjust_address (m_tramp, SImode, 0x18);
  emit_move_insn (mem, fnaddr);
}
/* Output the assembler code for a C++ thunk function.
   THUNK_DECL is the declaration for the thunk function itself, FUNCTION
   is the decl for the target function.  DELTA is an immediate constant
   offset to be added to the THIS parameter.  If VCALL_OFFSET is nonzero
   the word at the adjusted address *(*THIS' + VCALL_OFFSET) should be
   additionally added to THIS.  Finally jump to the entry point of
   FUNCTION.  */

static void
mn10300_asm_output_mi_thunk (FILE * file,
                             tree thunk_fndecl ATTRIBUTE_UNUSED,
                             HOST_WIDE_INT delta,
                             HOST_WIDE_INT vcall_offset,
                             tree function)
{
  const char * _this;

  /* Get the register holding the THIS parameter.  Handle the case
     where there is a hidden first argument for a returned structure.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    _this = reg_names [FIRST_ARGUMENT_REGNUM + 1];
  else
    _this = reg_names [FIRST_ARGUMENT_REGNUM];

  fprintf (file, "\t%s Thunk Entry Point:\n", ASM_COMMENT_START);

  /* Add the constant offset to THIS.  */
  if (delta)
    fprintf (file, "\tadd %d, %s\n", (int) delta, _this);

  /* Add *(*THIS + VCALL_OFFSET) to THIS, using the second address
     register as scratch.  */
  if (vcall_offset)
    {
      const char * scratch = reg_names [FIRST_ADDRESS_REGNUM + 1];

      fprintf (file, "\tmov %s, %s\n", _this, scratch);
      fprintf (file, "\tmov (%s), %s\n", scratch, scratch);
      fprintf (file, "\tadd %d, %s\n", (int) vcall_offset, scratch);
      fprintf (file, "\tmov (%s), %s\n", scratch, scratch);
      fprintf (file, "\tadd %s, %s\n", scratch, _this);
    }

  /* Tail-jump to the real function.  */
  fputs ("\tjmp ", file);
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  putc ('\n', file);
}
/* Return true if mn10300_output_mi_thunk would be able to output the
   assembler code for the thunk function specified by the arguments
   it is passed, and false otherwise.  */

static bool
mn10300_can_output_mi_thunk (const_tree thunk_fndecl ATTRIBUTE_UNUSED,
                             HOST_WIDE_INT delta ATTRIBUTE_UNUSED,
                             HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
                             const_tree function ATTRIBUTE_UNUSED)
{
  /* The emitter above handles every DELTA/VCALL_OFFSET combination.  */
  return true;
}
2339 bool
2340 mn10300_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
2342 if (REGNO_REG_CLASS (regno) == FP_REGS
2343 || REGNO_REG_CLASS (regno) == FP_ACC_REGS)
2344 /* Do not store integer values in FP registers. */
2345 return GET_MODE_CLASS (mode) == MODE_FLOAT && ((regno & 1) == 0);
2347 if (((regno) & 1) == 0 || GET_MODE_SIZE (mode) == 4)
2348 return true;
2350 if (REGNO_REG_CLASS (regno) == DATA_REGS
2351 || (TARGET_AM33 && REGNO_REG_CLASS (regno) == ADDRESS_REGS)
2352 || REGNO_REG_CLASS (regno) == EXTENDED_REGS)
2353 return GET_MODE_SIZE (mode) <= 4;
2355 return false;
2358 bool
2359 mn10300_modes_tieable (enum machine_mode mode1, enum machine_mode mode2)
2361 if (GET_MODE_CLASS (mode1) == MODE_FLOAT
2362 && GET_MODE_CLASS (mode2) != MODE_FLOAT)
2363 return false;
2365 if (GET_MODE_CLASS (mode2) == MODE_FLOAT
2366 && GET_MODE_CLASS (mode1) != MODE_FLOAT)
2367 return false;
2369 if (TARGET_AM33
2370 || mode1 == mode2
2371 || (GET_MODE_SIZE (mode1) <= 4 && GET_MODE_SIZE (mode2) <= 4))
2372 return true;
2374 return false;
2377 enum machine_mode
2378 mn10300_select_cc_mode (rtx x)
2380 return (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT) ? CC_FLOATmode : CCmode;
2383 static inline bool
2384 is_load_insn (rtx insn)
2386 if (GET_CODE (PATTERN (insn)) != SET)
2387 return false;
2389 return MEM_P (SET_SRC (PATTERN (insn)));
2392 static inline bool
2393 is_store_insn (rtx insn)
2395 if (GET_CODE (PATTERN (insn)) != SET)
2396 return false;
2398 return MEM_P (SET_DEST (PATTERN (insn)));
/* Worker for TARGET_SCHED_ADJUST_COST.  Update scheduling costs for
   situations that cannot be described using the attributes and DFA
   machinery.
   DEP is the insn being scheduled.
   INSN is the previous insn.
   LINK is the dependence link between them.
   COST is the current cycle cost for DEP.
   Returns the (possibly adjusted) cost.  */

static int
mn10300_adjust_sched_cost (rtx insn, rtx link, rtx dep, int cost)
{
  /* Timings attribute encodes latency/throughput; decoded at the end.  */
  int timings = get_attr_timings (insn);

  /* NOTE(review): for non-AM33 cores this forces every dependence to
     a unit cost rather than returning COST unchanged — looks
     deliberate (simple single-issue core) but worth confirming.  */
  if (!TARGET_AM33)
    return 1;

  /* Look through PARALLELs to the first element so the SET-based
     checks below see the real operation.  */
  if (GET_CODE (insn) == PARALLEL)
    insn = XVECEXP (insn, 0, 0);

  if (GET_CODE (dep) == PARALLEL)
    dep = XVECEXP (dep, 0, 0);

  /* For the AM34 a load instruction that follows a
     store instruction incurs an extra cycle of delay.  */
  if (mn10300_tune_cpu == PROCESSOR_AM34
      && is_load_insn (dep)
      && is_store_insn (insn))
    cost += 1;

  /* For the AM34 a non-store, non-branch FPU insn that follows
     another FPU insn incurs a one cycle throughput increase.  */
  else if (mn10300_tune_cpu == PROCESSOR_AM34
	   && ! is_store_insn (insn)
	   && ! JUMP_P (insn)
	   && GET_CODE (PATTERN (dep)) == SET
	   && GET_CODE (PATTERN (insn)) == SET
	   && GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (dep)))) == MODE_FLOAT
	   && GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (insn)))) == MODE_FLOAT)
    cost += 1;

  /* Resolve the conflict described in section 1-7-4 of
     Chapter 3 of the MN103E Series Instruction Manual
     where it says:

     "When the preceeding instruction is a CPU load or
     store instruction, a following FPU instruction
     cannot be executed until the CPU completes the
     latency period even though there are no register
     or flag dependencies between them."  */

  /* Only the AM33-2 (and later) CPUs have FPU instructions.  */
  if (! TARGET_AM33_2)
    return cost;

  /* If a data dependence already exists then the cost is correct.
     (REG_NOTE_KIND of 0 is a true data dependence.)  */
  if (REG_NOTE_KIND (link) == 0)
    return cost;

  /* Check that the instruction about to scheduled is an FPU instruction.  */
  if (GET_CODE (PATTERN (dep)) != SET)
    return cost;

  if (GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (dep)))) != MODE_FLOAT)
    return cost;

  /* Now check to see if the previous instruction is a load or store.  */
  if (! is_load_insn (insn) && ! is_store_insn (insn))
    return cost;

  /* XXX: Verify: The text of 1-7-4 implies that the restriction
     only applies when an INTEGER load/store preceeds an FPU
     instruction, but is this true ? For now we assume that it is.  */
  if (GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (insn)))) != MODE_INT)
    return cost;

  /* Extract the latency value from the timings attribute: values
     below 100 encode latency in the last digit, larger values in the
     last two digits.  */
  return timings < 100 ? (timings % 10) : (timings % 100);
}
2478 static void
2479 mn10300_conditional_register_usage (void)
2481 unsigned int i;
2483 if (!TARGET_AM33)
2485 for (i = FIRST_EXTENDED_REGNUM;
2486 i <= LAST_EXTENDED_REGNUM; i++)
2487 fixed_regs[i] = call_used_regs[i] = 1;
2489 if (!TARGET_AM33_2)
2491 for (i = FIRST_FP_REGNUM;
2492 i <= LAST_FP_REGNUM; i++)
2493 fixed_regs[i] = call_used_regs[i] = 1;
2495 if (flag_pic)
2496 fixed_regs[PIC_OFFSET_TABLE_REGNUM] =
2497 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
/* Initialize the GCC target structure.  */

/* Exception handling: this port only supports setjmp/longjmp unwinding.  */
#undef  TARGET_EXCEPT_UNWIND_INFO
#define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info

/* Assembler output.  */
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

/* Addressing.  */
#undef  TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS mn10300_legitimize_address

/* RTX costs.  */
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS mn10300_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST mn10300_address_cost

#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START mn10300_file_start
#undef  TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

#undef  TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA mn10300_asm_output_addr_const_extra

/* Option handling.  */
#undef  TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS MASK_MULT_BUG | MASK_PTR_A0D0
#undef  TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION mn10300_handle_option
#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE mn10300_option_override
#undef  TARGET_OPTION_OPTIMIZATION_TABLE
#define TARGET_OPTION_OPTIMIZATION_TABLE mn10300_option_optimization_table

#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO mn10300_encode_section_info

/* Calling conventions and argument passing.  */
#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY mn10300_return_in_memory
#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE mn10300_pass_by_reference
#undef  TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
#undef  TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES mn10300_arg_partial_bytes
#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG mn10300_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE mn10300_function_arg_advance

/* Varargs support.  */
#undef  TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS mn10300_builtin_saveregs
#undef  TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START mn10300_va_start

#undef  TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD mn10300_case_values_threshold

#undef  TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P mn10300_legitimate_address_p

/* Reload.  */
#undef  TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS mn10300_preferred_reload_class
#undef  TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
#define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS mn10300_preferred_output_reload_class

/* Trampolines.  */
#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE mn10300_asm_trampoline_template
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT mn10300_trampoline_init

/* Function and libcall return values.  */
#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE mn10300_function_value
#undef  TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE mn10300_libcall_value

/* MI thunks (C++ multiple inheritance).  */
#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK mn10300_asm_output_mi_thunk
#undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK mn10300_can_output_mi_thunk

/* Scheduling.  */
#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST mn10300_adjust_sched_cost

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE mn10300_conditional_register_usage

struct gcc_target targetm = TARGET_INITIALIZER;