/* gcc/config/v850/v850.c */
1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "output.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "expr.h"
37 #include "function.h"
38 #include "diagnostic-core.h"
39 #include "ggc.h"
40 #include "tm_p.h"
41 #include "target.h"
42 #include "target-def.h"
43 #include "df.h"
44 #include "opts.h"
46 #ifndef streq
47 #define streq(a,b) (strcmp (a, b) == 0)
48 #endif
50 static void v850_print_operand_address (FILE *, rtx);
52 /* Names of the various data areas used on the v850. */
53 tree GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
54 tree GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
56 /* Track the current data area set by the data area pragma (which
57 can be nested). Tested by check_default_data_area. */
58 data_area_stack_element * data_area_stack = NULL;
 60 /* True if we no longer need to check whether the current
 61 function is an interrupt handler. */
62 static int v850_interrupt_cache_p = FALSE;
64 rtx v850_compare_op0, v850_compare_op1;
66 /* Whether current function is an interrupt handler. */
67 static int v850_interrupt_p = FALSE;
69 static GTY(()) section * rosdata_section;
70 static GTY(()) section * rozdata_section;
71 static GTY(()) section * tdata_section;
72 static GTY(()) section * zdata_section;
73 static GTY(()) section * zbss_section;
75 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
76 Specify whether to pass the argument by reference. */
78 static bool
79 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
80 enum machine_mode mode, const_tree type,
81 bool named ATTRIBUTE_UNUSED)
83 unsigned HOST_WIDE_INT size;
85 if (type)
86 size = int_size_in_bytes (type);
87 else
88 size = GET_MODE_SIZE (mode);
90 return size > 8;
93 /* Implementing the Varargs Macros. */
95 static bool
96 v850_strict_argument_naming (cumulative_args_t ca ATTRIBUTE_UNUSED)
98 return !TARGET_GHS ? true : false;
101 /* Return an RTX to represent where an argument with mode MODE
102 and type TYPE will be passed to a function. If the result
103 is NULL_RTX, the argument will be pushed. */
105 static rtx
106 v850_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
107 const_tree type, bool named)
109 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
110 rtx result = NULL_RTX;
111 int size, align;
113 if (!named)
114 return NULL_RTX;
116 if (mode == BLKmode)
117 size = int_size_in_bytes (type);
118 else
119 size = GET_MODE_SIZE (mode);
121 size = (size + UNITS_PER_WORD -1) & ~(UNITS_PER_WORD -1);
123 if (size < 1)
125 /* Once we have stopped using argument registers, do not start up again. */
126 cum->nbytes = 4 * UNITS_PER_WORD;
127 return NULL_RTX;
130 if (size <= UNITS_PER_WORD && type)
131 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
132 else
133 align = size;
135 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
137 if (cum->nbytes > 4 * UNITS_PER_WORD)
138 return NULL_RTX;
140 if (type == NULL_TREE
141 && cum->nbytes + size > 4 * UNITS_PER_WORD)
142 return NULL_RTX;
144 switch (cum->nbytes / UNITS_PER_WORD)
146 case 0:
147 result = gen_rtx_REG (mode, 6);
148 break;
149 case 1:
150 result = gen_rtx_REG (mode, 7);
151 break;
152 case 2:
153 result = gen_rtx_REG (mode, 8);
154 break;
155 case 3:
156 result = gen_rtx_REG (mode, 9);
157 break;
158 default:
159 result = NULL_RTX;
162 return result;
165 /* Return the number of bytes which must be put into registers
 166 for values which are partly in registers and partly in memory. */
167 static int
168 v850_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
169 tree type, bool named)
171 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
172 int size, align;
174 if (TARGET_GHS && !named)
175 return 0;
177 if (mode == BLKmode)
178 size = int_size_in_bytes (type);
179 else
180 size = GET_MODE_SIZE (mode);
182 if (size < 1)
183 size = 1;
185 if (type)
186 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
187 else
188 align = size;
190 cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);
192 if (cum->nbytes > 4 * UNITS_PER_WORD)
193 return 0;
195 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
196 return 0;
198 if (type == NULL_TREE
199 && cum->nbytes + size > 4 * UNITS_PER_WORD)
200 return 0;
202 return 4 * UNITS_PER_WORD - cum->nbytes;
205 /* Update the data in CUM to advance over an argument
206 of mode MODE and data type TYPE.
207 (TYPE is null for libcalls where that information may not be available.) */
209 static void
210 v850_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
211 const_tree type, bool named ATTRIBUTE_UNUSED)
213 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
215 cum->nbytes += (((type && int_size_in_bytes (type) > 8
216 ? GET_MODE_SIZE (Pmode)
217 : (mode != BLKmode
218 ? GET_MODE_SIZE (mode)
219 : int_size_in_bytes (type))) + UNITS_PER_WORD - 1)
220 & -UNITS_PER_WORD);
223 /* Return the high and low words of a CONST_DOUBLE */
225 static void
226 const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
228 if (GET_CODE (x) == CONST_DOUBLE)
230 long t[2];
231 REAL_VALUE_TYPE rv;
233 switch (GET_MODE (x))
235 case DFmode:
236 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
237 REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
238 *p_high = t[1]; /* since v850 is little endian */
239 *p_low = t[0]; /* high is second word */
240 return;
242 case SFmode:
243 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
244 REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
245 *p_low = 0;
246 return;
248 case VOIDmode:
249 case DImode:
250 *p_high = CONST_DOUBLE_HIGH (x);
251 *p_low = CONST_DOUBLE_LOW (x);
252 return;
254 default:
255 break;
259 fatal_insn ("const_double_split got a bad insn:", x);
263 /* Return the cost of the rtx R with code CODE. */
265 static int
266 const_costs_int (HOST_WIDE_INT value, int zero_cost)
268 if (CONST_OK_FOR_I (value))
269 return zero_cost;
270 else if (CONST_OK_FOR_J (value))
271 return 1;
272 else if (CONST_OK_FOR_K (value))
273 return 2;
274 else
275 return 4;
278 static int
279 const_costs (rtx r, enum rtx_code c)
281 HOST_WIDE_INT high, low;
283 switch (c)
285 case CONST_INT:
286 return const_costs_int (INTVAL (r), 0);
288 case CONST_DOUBLE:
289 const_double_split (r, &high, &low);
290 if (GET_MODE (r) == SFmode)
291 return const_costs_int (high, 1);
292 else
293 return const_costs_int (high, 1) + const_costs_int (low, 1);
295 case SYMBOL_REF:
296 case LABEL_REF:
297 case CONST:
298 return 2;
300 case HIGH:
301 return 1;
303 default:
304 return 4;
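/* Compute the cost of rtx X (code CODEARG) for the optimizers; this is
   the TARGET_RTX_COSTS hook for this port. */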
308 static bool
309 v850_rtx_costs (rtx x,
310 int codearg,
311 int outer_code ATTRIBUTE_UNUSED,
312 int opno ATTRIBUTE_UNUSED,
313 int * total, bool speed)
315 enum rtx_code code = (enum rtx_code) codearg;
317 switch (code)
319 case CONST_INT:
320 case CONST_DOUBLE:
321 case CONST:
322 case SYMBOL_REF:
323 case LABEL_REF:
324 *total = COSTS_N_INSNS (const_costs (x, code));
325 return true;
327 case MOD:
328 case DIV:
329 case UMOD:
330 case UDIV:
331 if (TARGET_V850E && !speed)
332 *total = 6;
333 else
334 *total = 60;
335 return true;
337 case MULT:
338 if (TARGET_V850E
339 && ( GET_MODE (x) == SImode
340 || GET_MODE (x) == HImode
341 || GET_MODE (x) == QImode))
343 if (GET_CODE (XEXP (x, 1)) == REG)
344 *total = 4;
345 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
347 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
348 *total = 6;
349 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
350 *total = 10;
353 else
354 *total = 20;
355 return true;
357 case ZERO_EXTRACT:
358 if (outer_code == COMPARE)
359 *total = 0;
360 return false;
362 default:
363 return false;
367 /* Print operand X using operand code CODE to assembly language output file
368 FILE. */
370 static void
371 v850_print_operand (FILE * file, rtx x, int code)
373 HOST_WIDE_INT high, low;
375 switch (code)
377 case 'c':
378 /* We use 'c' operands with symbols for .vtinherit */
379 if (GET_CODE (x) == SYMBOL_REF)
381 output_addr_const(file, x);
382 break;
384 /* fall through */
385 case 'b':
386 case 'B':
387 case 'C':
388 switch ((code == 'B' || code == 'C')
389 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
391 case NE:
392 if (code == 'c' || code == 'C')
393 fprintf (file, "nz");
394 else
395 fprintf (file, "ne");
396 break;
397 case EQ:
398 if (code == 'c' || code == 'C')
399 fprintf (file, "z");
400 else
401 fprintf (file, "e");
402 break;
403 case GE:
404 fprintf (file, "ge");
405 break;
406 case GT:
407 fprintf (file, "gt");
408 break;
409 case LE:
410 fprintf (file, "le");
411 break;
412 case LT:
413 fprintf (file, "lt");
414 break;
415 case GEU:
416 fprintf (file, "nl");
417 break;
418 case GTU:
419 fprintf (file, "h");
420 break;
421 case LEU:
422 fprintf (file, "nh");
423 break;
424 case LTU:
425 fprintf (file, "l");
426 break;
427 default:
428 gcc_unreachable ();
430 break;
431 case 'F': /* high word of CONST_DOUBLE */
432 switch (GET_CODE (x))
434 case CONST_INT:
435 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
436 break;
438 case CONST_DOUBLE:
439 const_double_split (x, &high, &low);
440 fprintf (file, "%ld", (long) high);
441 break;
443 default:
444 gcc_unreachable ();
446 break;
447 case 'G': /* low word of CONST_DOUBLE */
448 switch (GET_CODE (x))
450 case CONST_INT:
451 fprintf (file, "%ld", (long) INTVAL (x));
452 break;
454 case CONST_DOUBLE:
455 const_double_split (x, &high, &low);
456 fprintf (file, "%ld", (long) low);
457 break;
459 default:
460 gcc_unreachable ();
462 break;
463 case 'L':
464 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
465 break;
466 case 'M':
467 fprintf (file, "%d", exact_log2 (INTVAL (x)));
468 break;
469 case 'O':
470 gcc_assert (special_symbolref_operand (x, VOIDmode));
472 if (GET_CODE (x) == CONST)
473 x = XEXP (XEXP (x, 0), 0);
474 else
475 gcc_assert (GET_CODE (x) == SYMBOL_REF);
477 if (SYMBOL_REF_ZDA_P (x))
478 fprintf (file, "zdaoff");
479 else if (SYMBOL_REF_SDA_P (x))
480 fprintf (file, "sdaoff");
481 else if (SYMBOL_REF_TDA_P (x))
482 fprintf (file, "tdaoff");
483 else
484 gcc_unreachable ();
485 break;
486 case 'P':
487 gcc_assert (special_symbolref_operand (x, VOIDmode));
488 output_addr_const (file, x);
489 break;
490 case 'Q':
491 gcc_assert (special_symbolref_operand (x, VOIDmode));
493 if (GET_CODE (x) == CONST)
494 x = XEXP (XEXP (x, 0), 0);
495 else
496 gcc_assert (GET_CODE (x) == SYMBOL_REF);
498 if (SYMBOL_REF_ZDA_P (x))
499 fprintf (file, "r0");
500 else if (SYMBOL_REF_SDA_P (x))
501 fprintf (file, "gp");
502 else if (SYMBOL_REF_TDA_P (x))
503 fprintf (file, "ep");
504 else
505 gcc_unreachable ();
506 break;
507 case 'R': /* 2nd word of a double. */
508 switch (GET_CODE (x))
510 case REG:
511 fprintf (file, reg_names[REGNO (x) + 1]);
512 break;
513 case MEM:
514 x = XEXP (adjust_address (x, SImode, 4), 0);
515 v850_print_operand_address (file, x);
516 if (GET_CODE (x) == CONST_INT)
517 fprintf (file, "[r0]");
518 break;
520 default:
521 break;
523 break;
524 case 'S':
526 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
527 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
528 fputs ("s", file);
530 break;
532 case 'T':
534 /* Like an 'S' operand above, but for unsigned loads only. */
535 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
536 fputs ("s", file);
538 break;
540 case 'W': /* print the instruction suffix */
541 switch (GET_MODE (x))
543 default:
544 gcc_unreachable ();
546 case QImode: fputs (".b", file); break;
547 case HImode: fputs (".h", file); break;
548 case SImode: fputs (".w", file); break;
549 case SFmode: fputs (".w", file); break;
551 break;
552 case '.': /* register r0 */
553 fputs (reg_names[0], file);
554 break;
555 case 'z': /* reg or zero */
556 if (GET_CODE (x) == REG)
557 fputs (reg_names[REGNO (x)], file);
558 else if ((GET_MODE(x) == SImode
559 || GET_MODE(x) == DFmode
560 || GET_MODE(x) == SFmode)
561 && x == CONST0_RTX(GET_MODE(x)))
562 fputs (reg_names[0], file);
563 else
565 gcc_assert (x == const0_rtx);
566 fputs (reg_names[0], file);
568 break;
569 default:
570 switch (GET_CODE (x))
572 case MEM:
573 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
574 output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
575 XEXP (x, 0)));
576 else
577 output_address (XEXP (x, 0));
578 break;
580 case REG:
581 fputs (reg_names[REGNO (x)], file);
582 break;
583 case SUBREG:
584 fputs (reg_names[subreg_regno (x)], file);
585 break;
586 case CONST_INT:
587 case SYMBOL_REF:
588 case CONST:
589 case LABEL_REF:
590 case CODE_LABEL:
591 v850_print_operand_address (file, x);
592 break;
593 default:
594 gcc_unreachable ();
596 break;
602 /* Output assembly language output for the address ADDR to FILE. */
604 static void
605 v850_print_operand_address (FILE * file, rtx addr)
607 switch (GET_CODE (addr))
609 case REG:
610 fprintf (file, "0[");
611 v850_print_operand (file, addr, 0);
612 fprintf (file, "]");
613 break;
614 case LO_SUM:
615 if (GET_CODE (XEXP (addr, 0)) == REG)
617 /* reg,foo */
618 fprintf (file, "lo(");
619 v850_print_operand (file, XEXP (addr, 1), 0);
620 fprintf (file, ")[");
621 v850_print_operand (file, XEXP (addr, 0), 0);
622 fprintf (file, "]");
624 break;
625 case PLUS:
626 if (GET_CODE (XEXP (addr, 0)) == REG
627 || GET_CODE (XEXP (addr, 0)) == SUBREG)
629 /* reg,foo */
630 v850_print_operand (file, XEXP (addr, 1), 0);
631 fprintf (file, "[");
632 v850_print_operand (file, XEXP (addr, 0), 0);
633 fprintf (file, "]");
635 else
637 v850_print_operand (file, XEXP (addr, 0), 0);
638 fprintf (file, "+");
639 v850_print_operand (file, XEXP (addr, 1), 0);
641 break;
642 case SYMBOL_REF:
644 const char *off_name = NULL;
645 const char *reg_name = NULL;
647 if (SYMBOL_REF_ZDA_P (addr))
649 off_name = "zdaoff";
650 reg_name = "r0";
652 else if (SYMBOL_REF_SDA_P (addr))
654 off_name = "sdaoff";
655 reg_name = "gp";
657 else if (SYMBOL_REF_TDA_P (addr))
659 off_name = "tdaoff";
660 reg_name = "ep";
663 if (off_name)
664 fprintf (file, "%s(", off_name);
665 output_addr_const (file, addr);
666 if (reg_name)
667 fprintf (file, ")[%s]", reg_name);
669 break;
670 case CONST:
671 if (special_symbolref_operand (addr, VOIDmode))
673 rtx x = XEXP (XEXP (addr, 0), 0);
674 const char *off_name;
675 const char *reg_name;
677 if (SYMBOL_REF_ZDA_P (x))
679 off_name = "zdaoff";
680 reg_name = "r0";
682 else if (SYMBOL_REF_SDA_P (x))
684 off_name = "sdaoff";
685 reg_name = "gp";
687 else if (SYMBOL_REF_TDA_P (x))
689 off_name = "tdaoff";
690 reg_name = "ep";
692 else
693 gcc_unreachable ();
695 fprintf (file, "%s(", off_name);
696 output_addr_const (file, addr);
697 fprintf (file, ")[%s]", reg_name);
699 else
700 output_addr_const (file, addr);
701 break;
702 default:
703 output_addr_const (file, addr);
704 break;
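/* Return true if CODE is a valid punctuation character for
   v850_print_operand; only '.' (which prints register r0) is accepted. */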
708 static bool
709 v850_print_operand_punct_valid_p (unsigned char code)
711 return code == '.';
714 /* When assemble_integer is used to emit the offsets for a switch
715 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
716 output_addr_const will normally barf at this, but it is OK to omit
717 the truncate and just emit the difference of the two labels. The
718 .hword directive will automatically handle the truncation for us.
720 Returns true if rtx was handled, false otherwise. */
722 static bool
723 v850_output_addr_const_extra (FILE * file, rtx x)
725 if (GET_CODE (x) != TRUNCATE)
726 return false;
728 x = XEXP (x, 0);
730 /* We must also handle the case where the switch table was passed a
731 constant value and so has been collapsed. In this case the first
732 label will have been deleted. In such a case it is OK to emit
733 nothing, since the table will not be used.
734 (cf gcc.c-torture/compile/990801-1.c). */
735 if (GET_CODE (x) == MINUS
736 && GET_CODE (XEXP (x, 0)) == LABEL_REF
737 && GET_CODE (XEXP (XEXP (x, 0), 0)) == CODE_LABEL
738 && INSN_DELETED_P (XEXP (XEXP (x, 0), 0)))
739 return true;
741 output_addr_const (file, x);
742 return true;
 745 /* Return appropriate code to load up a 1, 2, or 4 byte integer/floating
746 point value. */
748 const char *
749 output_move_single (rtx * operands)
751 rtx dst = operands[0];
752 rtx src = operands[1];
754 if (REG_P (dst))
756 if (REG_P (src))
757 return "mov %1,%0";
759 else if (GET_CODE (src) == CONST_INT)
761 HOST_WIDE_INT value = INTVAL (src);
763 if (CONST_OK_FOR_J (value)) /* Signed 5-bit immediate. */
764 return "mov %1,%0";
766 else if (CONST_OK_FOR_K (value)) /* Signed 16-bit immediate. */
767 return "movea %1,%.,%0";
769 else if (CONST_OK_FOR_L (value)) /* Upper 16 bits were set. */
770 return "movhi hi0(%1),%.,%0";
772 /* A random constant. */
773 else if (TARGET_V850E || TARGET_V850E2_ALL)
774 return "mov %1,%0";
775 else
776 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
779 else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
781 HOST_WIDE_INT high, low;
783 const_double_split (src, &high, &low);
785 if (CONST_OK_FOR_J (high)) /* Signed 5-bit immediate. */
786 return "mov %F1,%0";
788 else if (CONST_OK_FOR_K (high)) /* Signed 16-bit immediate. */
789 return "movea %F1,%.,%0";
791 else if (CONST_OK_FOR_L (high)) /* Upper 16 bits were set. */
792 return "movhi hi0(%F1),%.,%0";
794 /* A random constant. */
795 else if (TARGET_V850E || TARGET_V850E2_ALL)
796 return "mov %F1,%0";
798 else
799 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
802 else if (GET_CODE (src) == MEM)
803 return "%S1ld%W1 %1,%0";
805 else if (special_symbolref_operand (src, VOIDmode))
806 return "movea %O1(%P1),%Q1,%0";
808 else if (GET_CODE (src) == LABEL_REF
809 || GET_CODE (src) == SYMBOL_REF
810 || GET_CODE (src) == CONST)
812 if (TARGET_V850E || TARGET_V850E2_ALL)
813 return "mov hilo(%1),%0";
814 else
815 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
818 else if (GET_CODE (src) == HIGH)
819 return "movhi hi(%1),%.,%0";
821 else if (GET_CODE (src) == LO_SUM)
823 operands[2] = XEXP (src, 0);
824 operands[3] = XEXP (src, 1);
825 return "movea lo(%3),%2,%0";
829 else if (GET_CODE (dst) == MEM)
831 if (REG_P (src))
832 return "%S0st%W0 %1,%0";
834 else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
835 return "%S0st%W0 %.,%0";
837 else if (GET_CODE (src) == CONST_DOUBLE
838 && CONST0_RTX (GET_MODE (dst)) == src)
839 return "%S0st%W0 %.,%0";
842 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
843 return "";
846 /* Generate comparison code. */
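/* Return nonzero if OP compares the CC register against zero using one of
   the FPU "z" style modes (CC_FPU_LT, CC_FPU_LE or CC_FPU_EQ) with a
   matching comparison code. */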
848 v850_float_z_comparison_operator (rtx op, enum machine_mode mode)
850 enum rtx_code code = GET_CODE (op);
852 if (GET_RTX_CLASS (code) != RTX_COMPARE
853 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
854 return 0;
856 if (mode != GET_MODE (op) && mode != VOIDmode)
857 return 0;
859 if ((GET_CODE (XEXP (op, 0)) != REG
860 || REGNO (XEXP (op, 0)) != CC_REGNUM)
861 || XEXP (op, 1) != const0_rtx)
862 return 0;
864 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LTmode)
865 return code == LT;
866 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LEmode)
867 return code == LE;
868 if (GET_MODE (XEXP (op, 0)) == CC_FPU_EQmode)
869 return code == EQ;
871 return 0;
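/* Likewise, but for the FPU "nz" style modes
   (CC_FPU_GT, CC_FPU_GE or CC_FPU_NE). */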
875 v850_float_nz_comparison_operator (rtx op, enum machine_mode mode)
877 enum rtx_code code = GET_CODE (op);
879 if (GET_RTX_CLASS (code) != RTX_COMPARE
880 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
881 return 0;
883 if (mode != GET_MODE (op) && mode != VOIDmode)
884 return 0;
886 if ((GET_CODE (XEXP (op, 0)) != REG
887 || REGNO (XEXP (op, 0)) != CC_REGNUM)
888 || XEXP (op, 1) != const0_rtx)
889 return 0;
891 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GTmode)
892 return code == GT;
893 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GEmode)
894 return code == GE;
895 if (GET_MODE (XEXP (op, 0)) == CC_FPU_NEmode)
896 return code == NE;
898 return 0;
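/* Return the condition-code mode to use for comparing OP0 and OP1 with
   comparison code COND. */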
901 enum machine_mode
902 v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1 ATTRIBUTE_UNUSED)
904 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
906 switch (cond)
908 case LE:
909 return CC_FPU_LEmode;
910 case GE:
911 return CC_FPU_GEmode;
912 case LT:
913 return CC_FPU_LTmode;
914 case GT:
915 return CC_FPU_GTmode;
916 case EQ:
917 return CC_FPU_EQmode;
918 case NE:
919 return CC_FPU_NEmode;
920 default:
921 abort ();
924 return CCmode;
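/* Emit the FPU compare instruction for comparison COND of OP0 and OP1 and
   return the mode of the condition-code result. */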
927 enum machine_mode
928 v850_gen_float_compare (enum rtx_code cond, enum machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
930 if (GET_MODE(op0) == DFmode)
932 switch (cond)
934 case LE:
935 emit_insn (gen_cmpdf_le_insn (op0, op1));
936 break;
937 case GE:
938 emit_insn (gen_cmpdf_ge_insn (op0, op1));
939 break;
940 case LT:
941 emit_insn (gen_cmpdf_lt_insn (op0, op1));
942 break;
943 case GT:
944 emit_insn (gen_cmpdf_gt_insn (op0, op1));
945 break;
946 case EQ:
947 emit_insn (gen_cmpdf_eq_insn (op0, op1));
948 break;
949 case NE:
950 emit_insn (gen_cmpdf_ne_insn (op0, op1));
951 break;
952 default:
953 abort ();
956 else if (GET_MODE(v850_compare_op0) == SFmode)
958 switch (cond)
960 case LE:
961 emit_insn (gen_cmpsf_le_insn(op0, op1));
962 break;
963 case GE:
964 emit_insn (gen_cmpsf_ge_insn(op0, op1));
965 break;
966 case LT:
967 emit_insn (gen_cmpsf_lt_insn(op0, op1));
968 break;
969 case GT:
970 emit_insn (gen_cmpsf_gt_insn(op0, op1));
971 break;
972 case EQ:
973 emit_insn (gen_cmpsf_eq_insn(op0, op1));
974 break;
975 case NE:
976 emit_insn (gen_cmpsf_ne_insn(op0, op1));
977 break;
978 default:
979 abort ();
982 else
984 abort ();
987 return v850_select_cc_mode (cond, op0, op1);
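/* Emit a compare of OP0 against OP1 and return an rtx of the form
   (COND cc-reg (const_int 0)) that tests the result. */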
991 v850_gen_compare (enum rtx_code cond, enum machine_mode mode, rtx op0, rtx op1)
993 if (GET_MODE_CLASS(GET_MODE (op0)) != MODE_FLOAT)
995 emit_insn (gen_cmpsi_insn (op0, op1));
996 return gen_rtx_fmt_ee (cond, mode, gen_rtx_REG(CCmode, CC_REGNUM), const0_rtx);
998 else
1000 rtx cc_reg;
1001 mode = v850_gen_float_compare (cond, mode, op0, op1);
1002 cc_reg = gen_rtx_REG (mode, CC_REGNUM);
1003 emit_insn (gen_rtx_SET(mode, cc_reg, gen_rtx_REG (mode, FCC_REGNUM)));
1005 return gen_rtx_fmt_ee (cond, mode, cc_reg, const0_rtx);
1009 /* Return maximum offset supported for a short EP memory reference of mode
1010 MODE and signedness UNSIGNEDP. */
1012 static int
1013 ep_memory_offset (enum machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
1015 int max_offset = 0;
1017 switch (mode)
1019 case QImode:
1020 if (TARGET_SMALL_SLD)
1021 max_offset = (1 << 4);
1022 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1023 && unsignedp)
1024 max_offset = (1 << 4);
1025 else
1026 max_offset = (1 << 7);
1027 break;
1029 case HImode:
1030 if (TARGET_SMALL_SLD)
1031 max_offset = (1 << 5);
1032 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1033 && unsignedp)
1034 max_offset = (1 << 5);
1035 else
1036 max_offset = (1 << 8);
1037 break;
1039 case SImode:
1040 case SFmode:
1041 max_offset = (1 << 8);
1042 break;
1044 default:
1045 break;
1048 return max_offset;
1051 /* Return true if OP is a valid short EP memory reference */
1054 ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load)
1056 rtx addr, op0, op1;
1057 int max_offset;
1058 int mask;
1060 /* If we are not using the EP register on a per-function basis
1061 then do not allow this optimization at all. This is to
1062 prevent the use of the SLD/SST instructions which cannot be
1063 guaranteed to work properly due to a hardware bug. */
1064 if (!TARGET_EP)
1065 return FALSE;
1067 if (GET_CODE (op) != MEM)
1068 return FALSE;
1070 max_offset = ep_memory_offset (mode, unsigned_load);
1072 mask = GET_MODE_SIZE (mode) - 1;
1074 addr = XEXP (op, 0);
1075 if (GET_CODE (addr) == CONST)
1076 addr = XEXP (addr, 0);
1078 switch (GET_CODE (addr))
1080 default:
1081 break;
1083 case SYMBOL_REF:
1084 return SYMBOL_REF_TDA_P (addr);
1086 case REG:
1087 return REGNO (addr) == EP_REGNUM;
1089 case PLUS:
1090 op0 = XEXP (addr, 0);
1091 op1 = XEXP (addr, 1);
1092 if (GET_CODE (op1) == CONST_INT
1093 && INTVAL (op1) < max_offset
1094 && INTVAL (op1) >= 0
1095 && (INTVAL (op1) & mask) == 0)
1097 if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
1098 return TRUE;
1100 if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
1101 return TRUE;
1103 break;
1106 return FALSE;
1109 /* Substitute memory references involving a pointer, to use the ep pointer,
 1110 taking care to save and restore the ep. */
1112 static void
1113 substitute_ep_register (rtx first_insn,
1114 rtx last_insn,
1115 int uses,
1116 int regno,
1117 rtx * p_r1,
1118 rtx * p_ep)
1120 rtx reg = gen_rtx_REG (Pmode, regno);
1121 rtx insn;
1123 if (!*p_r1)
1125 df_set_regs_ever_live (1, true);
1126 *p_r1 = gen_rtx_REG (Pmode, 1);
1127 *p_ep = gen_rtx_REG (Pmode, 30);
1130 if (TARGET_DEBUG)
1131 fprintf (stderr, "\
1132 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1133 2 * (uses - 3), uses, reg_names[regno],
1134 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
1135 INSN_UID (first_insn), INSN_UID (last_insn));
1137 if (GET_CODE (first_insn) == NOTE)
1138 first_insn = next_nonnote_insn (first_insn);
1140 last_insn = next_nonnote_insn (last_insn);
1141 for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
1143 if (GET_CODE (insn) == INSN)
1145 rtx pattern = single_set (insn);
1147 /* Replace the memory references. */
1148 if (pattern)
1150 rtx *p_mem;
1151 /* Memory operands are signed by default. */
1152 int unsignedp = FALSE;
1154 if (GET_CODE (SET_DEST (pattern)) == MEM
1155 && GET_CODE (SET_SRC (pattern)) == MEM)
1156 p_mem = (rtx *)0;
1158 else if (GET_CODE (SET_DEST (pattern)) == MEM)
1159 p_mem = &SET_DEST (pattern);
1161 else if (GET_CODE (SET_SRC (pattern)) == MEM)
1162 p_mem = &SET_SRC (pattern);
1164 else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
1165 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1166 p_mem = &XEXP (SET_SRC (pattern), 0);
1168 else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
1169 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1171 p_mem = &XEXP (SET_SRC (pattern), 0);
1172 unsignedp = TRUE;
1174 else
1175 p_mem = (rtx *)0;
1177 if (p_mem)
1179 rtx addr = XEXP (*p_mem, 0);
1181 if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
1182 *p_mem = change_address (*p_mem, VOIDmode, *p_ep);
1184 else if (GET_CODE (addr) == PLUS
1185 && GET_CODE (XEXP (addr, 0)) == REG
1186 && REGNO (XEXP (addr, 0)) == (unsigned) regno
1187 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1188 && ((INTVAL (XEXP (addr, 1)))
1189 < ep_memory_offset (GET_MODE (*p_mem),
1190 unsignedp))
1191 && ((INTVAL (XEXP (addr, 1))) >= 0))
1192 *p_mem = change_address (*p_mem, VOIDmode,
1193 gen_rtx_PLUS (Pmode,
1194 *p_ep,
1195 XEXP (addr, 1)));
1201 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1202 insn = prev_nonnote_insn (first_insn);
1203 if (insn && GET_CODE (insn) == INSN
1204 && GET_CODE (PATTERN (insn)) == SET
1205 && SET_DEST (PATTERN (insn)) == *p_ep
1206 && SET_SRC (PATTERN (insn)) == *p_r1)
1207 delete_insn (insn);
1208 else
1209 emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);
1211 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
1212 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
1216 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
 1217 -mep: heavily used pointers are copied into ep so that the shorter
 1218 implicit (ep-relative) addressing can be used. */
1220 static void
1221 v850_reorg (void)
1223 struct
1225 int uses;
1226 rtx first_insn;
1227 rtx last_insn;
1229 regs[FIRST_PSEUDO_REGISTER];
1231 int i;
1232 int use_ep = FALSE;
1233 rtx r1 = NULL_RTX;
1234 rtx ep = NULL_RTX;
1235 rtx insn;
1236 rtx pattern;
1238 /* If not ep mode, just return now. */
1239 if (!TARGET_EP)
1240 return;
1242 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1244 regs[i].uses = 0;
1245 regs[i].first_insn = NULL_RTX;
1246 regs[i].last_insn = NULL_RTX;
1249 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
1251 switch (GET_CODE (insn))
1253 /* End of basic block */
1254 default:
1255 if (!use_ep)
1257 int max_uses = -1;
1258 int max_regno = -1;
1260 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1262 if (max_uses < regs[i].uses)
1264 max_uses = regs[i].uses;
1265 max_regno = i;
1269 if (max_uses > 3)
1270 substitute_ep_register (regs[max_regno].first_insn,
1271 regs[max_regno].last_insn,
1272 max_uses, max_regno, &r1, &ep);
1275 use_ep = FALSE;
1276 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1278 regs[i].uses = 0;
1279 regs[i].first_insn = NULL_RTX;
1280 regs[i].last_insn = NULL_RTX;
1282 break;
1284 case NOTE:
1285 break;
1287 case INSN:
1288 pattern = single_set (insn);
1290 /* See if there are any memory references we can shorten */
1291 if (pattern)
1293 rtx src = SET_SRC (pattern);
1294 rtx dest = SET_DEST (pattern);
1295 rtx mem;
1296 /* Memory operands are signed by default. */
1297 int unsignedp = FALSE;
1299 /* We might have (SUBREG (MEM)) here, so just get rid of the
1300 subregs to make this code simpler. */
1301 if (GET_CODE (dest) == SUBREG
1302 && (GET_CODE (SUBREG_REG (dest)) == MEM
1303 || GET_CODE (SUBREG_REG (dest)) == REG))
1304 alter_subreg (&dest);
1305 if (GET_CODE (src) == SUBREG
1306 && (GET_CODE (SUBREG_REG (src)) == MEM
1307 || GET_CODE (SUBREG_REG (src)) == REG))
1308 alter_subreg (&src);
1310 if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
1311 mem = NULL_RTX;
1313 else if (GET_CODE (dest) == MEM)
1314 mem = dest;
1316 else if (GET_CODE (src) == MEM)
1317 mem = src;
1319 else if (GET_CODE (src) == SIGN_EXTEND
1320 && GET_CODE (XEXP (src, 0)) == MEM)
1321 mem = XEXP (src, 0);
1323 else if (GET_CODE (src) == ZERO_EXTEND
1324 && GET_CODE (XEXP (src, 0)) == MEM)
1326 mem = XEXP (src, 0);
1327 unsignedp = TRUE;
1329 else
1330 mem = NULL_RTX;
1332 if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
1333 use_ep = TRUE;
1335 else if (!use_ep && mem
1336 && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
1338 rtx addr = XEXP (mem, 0);
1339 int regno = -1;
1340 int short_p;
1342 if (GET_CODE (addr) == REG)
1344 short_p = TRUE;
1345 regno = REGNO (addr);
1348 else if (GET_CODE (addr) == PLUS
1349 && GET_CODE (XEXP (addr, 0)) == REG
1350 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1351 && ((INTVAL (XEXP (addr, 1)))
1352 < ep_memory_offset (GET_MODE (mem), unsignedp))
1353 && ((INTVAL (XEXP (addr, 1))) >= 0))
1355 short_p = TRUE;
1356 regno = REGNO (XEXP (addr, 0));
1359 else
1360 short_p = FALSE;
1362 if (short_p)
1364 regs[regno].uses++;
1365 regs[regno].last_insn = insn;
1366 if (!regs[regno].first_insn)
1367 regs[regno].first_insn = insn;
1371 /* Loading up a register in the basic block zaps any savings
1372 for the register */
1373 if (GET_CODE (dest) == REG)
1375 enum machine_mode mode = GET_MODE (dest);
1376 int regno;
1377 int endregno;
1379 regno = REGNO (dest);
1380 endregno = regno + HARD_REGNO_NREGS (regno, mode);
1382 if (!use_ep)
1384 /* See if we can use the pointer before this
1385 modification. */
1386 int max_uses = -1;
1387 int max_regno = -1;
1389 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1391 if (max_uses < regs[i].uses)
1393 max_uses = regs[i].uses;
1394 max_regno = i;
1398 if (max_uses > 3
1399 && max_regno >= regno
1400 && max_regno < endregno)
1402 substitute_ep_register (regs[max_regno].first_insn,
1403 regs[max_regno].last_insn,
1404 max_uses, max_regno, &r1,
1405 &ep);
1407 /* Since we made a substitution, zap all remembered
1408 registers. */
1409 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1411 regs[i].uses = 0;
1412 regs[i].first_insn = NULL_RTX;
1413 regs[i].last_insn = NULL_RTX;
1418 for (i = regno; i < endregno; i++)
1420 regs[i].uses = 0;
1421 regs[i].first_insn = NULL_RTX;
1422 regs[i].last_insn = NULL_RTX;
1430 /* # of registers saved by the interrupt handler. */
1431 #define INTERRUPT_FIXED_NUM 5
1433 /* # of bytes for registers saved by the interrupt handler. */
1434 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1436 /* # of words saved for other registers. */
1437 #define INTERRUPT_ALL_SAVE_NUM \
1438 (30 - INTERRUPT_FIXED_NUM)
1440 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
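/* Return the number of bytes needed to save the call-saved registers (and,
   for interrupt handlers, any other live registers) of the current function;
   record the set of saved registers in *P_REG_SAVED if it is non-NULL. */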
1443 compute_register_save_size (long * p_reg_saved)
1445 int size = 0;
1446 int i;
1447 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1448 int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
1449 long reg_saved = 0;
1451 /* Count the return pointer if we need to save it. */
1452 if (crtl->profile && !call_p)
1454 df_set_regs_ever_live (LINK_POINTER_REGNUM, true);
1455 call_p = 1;
1458 /* Count space for the register saves. */
1459 if (interrupt_handler)
1461 for (i = 0; i <= 31; i++)
1462 switch (i)
1464 default:
1465 if (df_regs_ever_live_p (i) || call_p)
1467 size += 4;
1468 reg_saved |= 1L << i;
1470 break;
1472 /* We don't save/restore r0 or the stack pointer */
1473 case 0:
1474 case STACK_POINTER_REGNUM:
1475 break;
1477 /* For registers with fixed use, we save them, set them to the
1478 appropriate value, and then restore them.
1479 These registers are handled specially, so don't list them
1480 on the list of registers to save in the prologue. */
1481 case 1: /* temp used to hold ep */
1482 case 4: /* gp */
1483 case 10: /* temp used to call interrupt save/restore */
1484 case 11: /* temp used to call interrupt save/restore (long call) */
1485 case EP_REGNUM: /* ep */
1486 size += 4;
1487 break;
1490 else
1492 /* Find the first register that needs to be saved. */
1493 for (i = 0; i <= 31; i++)
1494 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1495 || i == LINK_POINTER_REGNUM))
1496 break;
1498 /* If it is possible that an out-of-line helper function might be
1499 used to generate the prologue for the current function, then we
1500 need to cover the possibility that such a helper function will
1501 be used, despite the fact that there might be gaps in the list of
1502 registers that need to be saved. To detect this we note that the
1503 helper functions always push at least register r29 (provided
1504 that the function is not an interrupt handler). */
1506 if (TARGET_PROLOG_FUNCTION
1507 && (i == 2 || ((i >= 20) && (i < 30))))
1509 if (i == 2)
1511 size += 4;
1512 reg_saved |= 1L << i;
1514 i = 20;
1517 /* Helper functions save all registers between the starting
1518 register and the last register, regardless of whether they
1519 are actually used by the function or not. */
1520 for (; i <= 29; i++)
1522 size += 4;
1523 reg_saved |= 1L << i;
1526 if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
1528 size += 4;
1529 reg_saved |= 1L << LINK_POINTER_REGNUM;
1532 else
1534 for (; i <= 31; i++)
1535 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1536 || i == LINK_POINTER_REGNUM))
1538 size += 4;
1539 reg_saved |= 1L << i;
1544 if (p_reg_saved)
1545 *p_reg_saved = reg_saved;
1547 return size;
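/* Return the total frame size for the current function: SIZE bytes of local
   variables plus the register save area and the outgoing argument area. */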
1551 compute_frame_size (int size, long * p_reg_saved)
1553 return (size
1554 + compute_register_save_size (p_reg_saved)
1555 + crtl->outgoing_args_size);
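/* Return nonzero if using an out-of-line prologue/epilogue helper for
   NUM_SAVE registers and a frame of FRAME_SIZE bytes is expected to be
   shorter than saving and restoring the registers inline. */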
1558 static int
1559 use_prolog_function (int num_save, int frame_size)
1561 int alloc_stack = (4 * num_save);
1562 int unalloc_stack = frame_size - alloc_stack;
1563 int save_func_len, restore_func_len;
1564 int save_normal_len, restore_normal_len;
1566 if (! TARGET_DISABLE_CALLT)
1567 save_func_len = restore_func_len = 2;
1568 else
1569 save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;
1571 if (unalloc_stack)
1573 save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1574 restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1577 /* See if we would have used ep to save the stack. */
1578 if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
1579 save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
1580 else
1581 save_normal_len = restore_normal_len = 4 * num_save;
1583 save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
1584 restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;
1586 /* Don't bother checking if we don't actually save any space.
1587 This happens for instance if one register is saved and additional
1588 stack space is allocated. */
1589 return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
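/* Expand the prologue for the current function: emit the interrupt save
   insns if needed, save the live call-saved registers (via a prologue helper
   insn when profitable) and allocate the stack frame. */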
1592 void
1593 expand_prologue (void)
1595 unsigned int i;
1596 unsigned int size = get_frame_size ();
1597 unsigned int actual_fsize;
1598 unsigned int init_stack_alloc = 0;
1599 rtx save_regs[32];
1600 rtx save_all;
1601 unsigned int num_save;
1602 int code;
1603 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1604 long reg_saved = 0;
1606 actual_fsize = compute_frame_size (size, &reg_saved);
1608 /* Save/setup global registers for interrupt functions right now. */
1609 if (interrupt_handler)
1611 if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
1612 emit_insn (gen_callt_save_interrupt ());
1613 else
1614 emit_insn (gen_save_interrupt ());
1616 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1618 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1619 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1622 /* Identify all of the saved registers. */
1623 num_save = 0;
1624 for (i = 1; i < 32; i++)
1626 if (((1L << i) & reg_saved) != 0)
1627 save_regs[num_save++] = gen_rtx_REG (Pmode, i);
1630 /* See if we have an insn that allocates stack space and saves the particular
1631 registers we want to. */
1632 save_all = NULL_RTX;
1633 if (TARGET_PROLOG_FUNCTION && num_save > 0)
1635 if (use_prolog_function (num_save, actual_fsize))
1637 int alloc_stack = 4 * num_save;
1638 int offset = 0;
1640 save_all = gen_rtx_PARALLEL
1641 (VOIDmode,
1642 rtvec_alloc (num_save + 1
1643 + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));
1645 XVECEXP (save_all, 0, 0)
1646 = gen_rtx_SET (VOIDmode,
1647 stack_pointer_rtx,
1648 gen_rtx_PLUS (Pmode,
1649 stack_pointer_rtx,
1650 GEN_INT(-alloc_stack)));
1651 for (i = 0; i < num_save; i++)
1653 offset -= 4;
1654 XVECEXP (save_all, 0, i+1)
1655 = gen_rtx_SET (VOIDmode,
1656 gen_rtx_MEM (Pmode,
1657 gen_rtx_PLUS (Pmode,
1658 stack_pointer_rtx,
1659 GEN_INT(offset))),
1660 save_regs[i]);
1663 if (TARGET_DISABLE_CALLT)
1665 XVECEXP (save_all, 0, num_save + 1)
1666 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));
1668 if (TARGET_LONG_CALLS)
1669 XVECEXP (save_all, 0, num_save + 2)
1670 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
1673 code = recog (save_all, NULL_RTX, NULL);
1674 if (code >= 0)
1676 rtx insn = emit_insn (save_all);
1677 INSN_CODE (insn) = code;
1678 actual_fsize -= alloc_stack;
1681 else
1682 save_all = NULL_RTX;
1686 /* If no prolog save function is available, store the registers the old
1687 fashioned way (one by one). */
1688 if (!save_all)
1690 /* Special case interrupt functions that save all registers for a call. */
1691 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1693 if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
1694 emit_insn (gen_callt_save_all_interrupt ());
1695 else
1696 emit_insn (gen_save_all_interrupt ());
1698 else
1700 int offset;
1701 /* If the stack is too big, allocate it in chunks so we can do the
1702 register saves. We use the register save size so we use the ep
1703 register. */
1704 if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1705 init_stack_alloc = compute_register_save_size (NULL);
1706 else
1707 init_stack_alloc = actual_fsize;
1709 /* Save registers at the beginning of the stack frame. */
1710 offset = init_stack_alloc - 4;
1712 if (init_stack_alloc)
1713 emit_insn (gen_addsi3 (stack_pointer_rtx,
1714 stack_pointer_rtx,
1715 GEN_INT (- (signed) init_stack_alloc)));
1717 /* Save the return pointer first. */
1718 if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
1720 emit_move_insn (gen_rtx_MEM (SImode,
1721 plus_constant (Pmode,
1722 stack_pointer_rtx,
1723 offset)),
1724 save_regs[--num_save]);
1725 offset -= 4;
1728 for (i = 0; i < num_save; i++)
1730 emit_move_insn (gen_rtx_MEM (SImode,
1731 plus_constant (Pmode,
1732 stack_pointer_rtx,
1733 offset)),
1734 save_regs[i]);
1735 offset -= 4;
1740 /* Allocate the rest of the stack that was not allocated above (either it is
1741 > 32K or we just called a function to save the registers and needed more
 1742 stack). */
1743 if (actual_fsize > init_stack_alloc)
1745 int diff = actual_fsize - init_stack_alloc;
1746 if (CONST_OK_FOR_K (-diff))
1747 emit_insn (gen_addsi3 (stack_pointer_rtx,
1748 stack_pointer_rtx,
1749 GEN_INT (-diff)));
1750 else
1752 rtx reg = gen_rtx_REG (Pmode, 12);
1753 emit_move_insn (reg, GEN_INT (-diff));
1754 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
1758 /* If we need a frame pointer, set it up now. */
1759 if (frame_pointer_needed)
1760 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
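/* Expand the epilogue for the current function: restore the saved registers
   (via an epilogue helper insn when profitable), release the stack frame and
   emit the appropriate return insn. */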
1764 void
1765 expand_epilogue (void)
1767 unsigned int i;
1768 unsigned int size = get_frame_size ();
1769 long reg_saved = 0;
1770 int actual_fsize = compute_frame_size (size, &reg_saved);
1771 rtx restore_regs[32];
1772 rtx restore_all;
1773 unsigned int num_restore;
1774 int code;
1775 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
 1777 /* Exclude the stack space handled by the interrupt save/restore insns. */
1778 if (interrupt_handler)
1780 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1781 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1782 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1785 /* Cut off any dynamic stack created. */
1786 if (frame_pointer_needed)
1787 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1789 /* Identify all of the saved registers. */
1790 num_restore = 0;
1791 for (i = 1; i < 32; i++)
1793 if (((1L << i) & reg_saved) != 0)
1794 restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
1797 /* See if we have an insn that restores the particular registers we
1798 want to. */
1799 restore_all = NULL_RTX;
1801 if (TARGET_PROLOG_FUNCTION
1802 && num_restore > 0
1803 && !interrupt_handler)
1805 int alloc_stack = (4 * num_restore);
1807 /* Don't bother checking if we don't actually save any space. */
1808 if (use_prolog_function (num_restore, actual_fsize))
1810 int offset;
1811 restore_all = gen_rtx_PARALLEL (VOIDmode,
1812 rtvec_alloc (num_restore + 2));
1813 XVECEXP (restore_all, 0, 0) = ret_rtx;
1814 XVECEXP (restore_all, 0, 1)
1815 = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1816 gen_rtx_PLUS (Pmode,
1817 stack_pointer_rtx,
1818 GEN_INT (alloc_stack)));
1820 offset = alloc_stack - 4;
1821 for (i = 0; i < num_restore; i++)
1823 XVECEXP (restore_all, 0, i+2)
1824 = gen_rtx_SET (VOIDmode,
1825 restore_regs[i],
1826 gen_rtx_MEM (Pmode,
1827 gen_rtx_PLUS (Pmode,
1828 stack_pointer_rtx,
1829 GEN_INT(offset))));
1830 offset -= 4;
1833 code = recog (restore_all, NULL_RTX, NULL);
1835 if (code >= 0)
1837 rtx insn;
1839 actual_fsize -= alloc_stack;
1840 if (actual_fsize)
1842 if (CONST_OK_FOR_K (actual_fsize))
1843 emit_insn (gen_addsi3 (stack_pointer_rtx,
1844 stack_pointer_rtx,
1845 GEN_INT (actual_fsize)));
1846 else
1848 rtx reg = gen_rtx_REG (Pmode, 12);
1849 emit_move_insn (reg, GEN_INT (actual_fsize));
1850 emit_insn (gen_addsi3 (stack_pointer_rtx,
1851 stack_pointer_rtx,
1852 reg));
1856 insn = emit_jump_insn (restore_all);
1857 INSN_CODE (insn) = code;
1860 else
1861 restore_all = NULL_RTX;
1865 /* If no epilogue save function is available, restore the registers the
1866 old fashioned way (one by one). */
1867 if (!restore_all)
1869 unsigned int init_stack_free;
1871 /* If the stack is large, we need to cut it down in 2 pieces. */
1872 if (interrupt_handler)
1873 init_stack_free = 0;
1874 else if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1875 init_stack_free = 4 * num_restore;
1876 else
1877 init_stack_free = (signed) actual_fsize;
1879 /* Deallocate the rest of the stack if it is > 32K. */
1880 if ((unsigned int) actual_fsize > init_stack_free)
1882 int diff;
1884 diff = actual_fsize - init_stack_free;
1886 if (CONST_OK_FOR_K (diff))
1887 emit_insn (gen_addsi3 (stack_pointer_rtx,
1888 stack_pointer_rtx,
1889 GEN_INT (diff)));
1890 else
1892 rtx reg = gen_rtx_REG (Pmode, 12);
1893 emit_move_insn (reg, GEN_INT (diff));
1894 emit_insn (gen_addsi3 (stack_pointer_rtx,
1895 stack_pointer_rtx,
1896 reg));
1900 /* Special case interrupt functions that save all registers
1901 for a call. */
1902 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1904 if (! TARGET_DISABLE_CALLT)
1905 emit_insn (gen_callt_restore_all_interrupt ());
1906 else
1907 emit_insn (gen_restore_all_interrupt ());
1909 else
1911 /* Restore registers from the beginning of the stack frame. */
1912 int offset = init_stack_free - 4;
1914 /* Restore the return pointer first. */
1915 if (num_restore > 0
1916 && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
1918 emit_move_insn (restore_regs[--num_restore],
1919 gen_rtx_MEM (SImode,
1920 plus_constant (Pmode,
1921 stack_pointer_rtx,
1922 offset)));
1923 offset -= 4;
1926 for (i = 0; i < num_restore; i++)
1928 emit_move_insn (restore_regs[i],
1929 gen_rtx_MEM (SImode,
1930 plus_constant (Pmode,
1931 stack_pointer_rtx,
1932 offset)));
1934 emit_use (restore_regs[i]);
1935 offset -= 4;
1938 /* Cut back the remainder of the stack. */
1939 if (init_stack_free)
1940 emit_insn (gen_addsi3 (stack_pointer_rtx,
1941 stack_pointer_rtx,
1942 GEN_INT (init_stack_free)));
1945 /* And return or use reti for interrupt handlers. */
1946 if (interrupt_handler)
1948 if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
1949 emit_insn (gen_callt_return_interrupt ());
1950 else
1951 emit_jump_insn (gen_return_interrupt ());
1953 else if (actual_fsize)
1954 emit_jump_insn (gen_return_internal ());
1955 else
1956 emit_jump_insn (gen_return_simple ());
1959 v850_interrupt_cache_p = FALSE;
1960 v850_interrupt_p = FALSE;
1963 /* Update the condition code from the insn. */
1964 void
1965 notice_update_cc (rtx body, rtx insn)
1967 switch (get_attr_cc (insn))
1969 case CC_NONE:
1970 /* Insn does not affect CC at all. */
1971 break;
1973 case CC_NONE_0HIT:
1974 /* Insn does not change CC, but the 0'th operand has been changed. */
1975 if (cc_status.value1 != 0
1976 && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
1977 cc_status.value1 = 0;
1978 break;
1980 case CC_SET_ZN:
1981 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
1982 V,C is in an unusable state. */
1983 CC_STATUS_INIT;
1984 cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
1985 cc_status.value1 = recog_data.operand[0];
1986 break;
1988 case CC_SET_ZNV:
1989 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
1990 C is in an unusable state. */
1991 CC_STATUS_INIT;
1992 cc_status.flags |= CC_NO_CARRY;
1993 cc_status.value1 = recog_data.operand[0];
1994 break;
1996 case CC_COMPARE:
1997 /* The insn is a compare instruction. */
1998 CC_STATUS_INIT;
1999 cc_status.value1 = SET_SRC (body);
2000 break;
2002 case CC_CLOBBER:
2003 /* Insn doesn't leave CC in a usable state. */
2004 CC_STATUS_INIT;
2005 break;
2007 default:
2008 break;
2012 /* Retrieve the data area that has been chosen for the given decl. */
2014 v850_data_area
2015 v850_get_data_area (tree decl)
2017 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2018 return DATA_AREA_SDA;
2020 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2021 return DATA_AREA_TDA;
2023 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2024 return DATA_AREA_ZDA;
2026 return DATA_AREA_NORMAL;
2029 /* Store the indicated data area in the decl's attributes. */
2031 static void
2032 v850_set_data_area (tree decl, v850_data_area data_area)
2034 tree name;
2036 switch (data_area)
2038 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2039 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2040 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2041 default:
2042 return;
2045 DECL_ATTRIBUTES (decl) = tree_cons
2046 (name, NULL, DECL_ATTRIBUTES (decl));
2049 /* Handle an "interrupt" attribute; arguments as in
2050 struct attribute_spec.handler. */
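/* For example:  void isr (void) __attribute__ ((interrupt));  */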
2051 static tree
2052 v850_handle_interrupt_attribute (tree * node,
2053 tree name,
2054 tree args ATTRIBUTE_UNUSED,
2055 int flags ATTRIBUTE_UNUSED,
2056 bool * no_add_attrs)
2058 if (TREE_CODE (*node) != FUNCTION_DECL)
2060 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2061 name);
2062 *no_add_attrs = true;
2065 return NULL_TREE;
2068 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2069 struct attribute_spec.handler. */
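/* For example:
     static int counter __attribute__ ((tda));
   places COUNTER in the tiny data area. */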
2070 static tree
2071 v850_handle_data_area_attribute (tree* node,
2072 tree name,
2073 tree args ATTRIBUTE_UNUSED,
2074 int flags ATTRIBUTE_UNUSED,
2075 bool * no_add_attrs)
2077 v850_data_area data_area;
2078 v850_data_area area;
2079 tree decl = *node;
2081 /* Implement data area attribute. */
2082 if (is_attribute_p ("sda", name))
2083 data_area = DATA_AREA_SDA;
2084 else if (is_attribute_p ("tda", name))
2085 data_area = DATA_AREA_TDA;
2086 else if (is_attribute_p ("zda", name))
2087 data_area = DATA_AREA_ZDA;
2088 else
2089 gcc_unreachable ();
2091 switch (TREE_CODE (decl))
2093 case VAR_DECL:
2094 if (current_function_decl != NULL_TREE)
2096 error_at (DECL_SOURCE_LOCATION (decl),
2097 "data area attributes cannot be specified for "
2098 "local variables");
2099 *no_add_attrs = true;
2102 /* Drop through. */
2104 case FUNCTION_DECL:
2105 area = v850_get_data_area (decl);
2106 if (area != DATA_AREA_NORMAL && data_area != area)
2108 error ("data area of %q+D conflicts with previous declaration",
2109 decl);
2110 *no_add_attrs = true;
2112 break;
2114 default:
2115 break;
2118 return NULL_TREE;
2122 /* Return nonzero if FUNC is an interrupt function as specified
2123 by the "interrupt" attribute. */
2126 v850_interrupt_function_p (tree func)
2128 tree a;
2129 int ret = 0;
2131 if (v850_interrupt_cache_p)
2132 return v850_interrupt_p;
2134 if (TREE_CODE (func) != FUNCTION_DECL)
2135 return 0;
2137 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2138 if (a != NULL_TREE)
2139 ret = 1;
2141 else
2143 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2144 ret = a != NULL_TREE;
 2147 /* It's not safe to trust global variables until after function inlining has
2148 been done. */
2149 if (reload_completed | reload_in_progress)
2150 v850_interrupt_p = ret;
2152 return ret;
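/* Record the data area chosen for DECL (from its attributes, an explicit
   section name, or the -mtda/-msda/-mzda size limits) in the flags of its
   SYMBOL_REF SYMBOL. */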
2156 static void
2157 v850_encode_data_area (tree decl, rtx symbol)
2159 int flags;
2161 /* Map explicit sections into the appropriate attribute */
2162 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2164 if (DECL_SECTION_NAME (decl))
2166 const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
2168 if (streq (name, ".zdata") || streq (name, ".zbss"))
2169 v850_set_data_area (decl, DATA_AREA_ZDA);
2171 else if (streq (name, ".sdata") || streq (name, ".sbss"))
2172 v850_set_data_area (decl, DATA_AREA_SDA);
2174 else if (streq (name, ".tdata"))
2175 v850_set_data_area (decl, DATA_AREA_TDA);
2178 /* If no attribute, support -m{zda,sda,tda}=n */
2179 else
2181 int size = int_size_in_bytes (TREE_TYPE (decl));
2182 if (size <= 0)
2185 else if (size <= small_memory_max [(int) SMALL_MEMORY_TDA])
2186 v850_set_data_area (decl, DATA_AREA_TDA);
2188 else if (size <= small_memory_max [(int) SMALL_MEMORY_SDA])
2189 v850_set_data_area (decl, DATA_AREA_SDA);
2191 else if (size <= small_memory_max [(int) SMALL_MEMORY_ZDA])
2192 v850_set_data_area (decl, DATA_AREA_ZDA);
2195 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2196 return;
2199 flags = SYMBOL_REF_FLAGS (symbol);
2200 switch (v850_get_data_area (decl))
2202 case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
2203 case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
2204 case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
2205 default: gcc_unreachable ();
2207 SYMBOL_REF_FLAGS (symbol) = flags;
2210 static void
2211 v850_encode_section_info (tree decl, rtx rtl, int first)
2213 default_encode_section_info (decl, rtl, first);
2215 if (TREE_CODE (decl) == VAR_DECL
2216 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2217 v850_encode_data_area (decl, XEXP (rtl, 0));
2220 /* Construct a JR instruction to a routine that will perform the equivalent of
2221 the RTL passed in as an argument. This RTL is a function epilogue that
2222 pops registers off the stack and possibly releases some extra stack space
2223 as well. The code has already verified that the RTL matches these
2224 requirements. */
2226 char *
2227 construct_restore_jr (rtx op)
2229 int count = XVECLEN (op, 0);
2230 int stack_bytes;
2231 unsigned long int mask;
2232 unsigned long int first;
2233 unsigned long int last;
2234 int i;
2235 static char buff [100]; /* XXX */
2237 if (count <= 2)
2239 error ("bogus JR construction: %d", count);
2240 return NULL;
2243 /* Work out how many bytes to pop off the stack before retrieving
2244 registers. */
2245 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2246 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2247 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2249 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2251 /* Each pop will remove 4 bytes from the stack.... */
2252 stack_bytes -= (count - 2) * 4;
 2254 /* Make sure that the amount we are popping (beyond the register saves) is zero. */
2255 if (stack_bytes != 0)
2257 error ("bad amount of stack space removal: %d", stack_bytes);
2258 return NULL;
 2261 /* Now compute the bit mask of registers to pop. */
2262 mask = 0;
2263 for (i = 2; i < count; i++)
2265 rtx vector_element = XVECEXP (op, 0, i);
2267 gcc_assert (GET_CODE (vector_element) == SET);
2268 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2269 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2270 SImode));
2272 mask |= 1 << REGNO (SET_DEST (vector_element));
2275 /* Scan for the first register to pop. */
2276 for (first = 0; first < 32; first++)
2278 if (mask & (1 << first))
2279 break;
2282 gcc_assert (first < 32);
2284 /* Discover the last register to pop. */
2285 if (mask & (1 << LINK_POINTER_REGNUM))
2287 last = LINK_POINTER_REGNUM;
2289 else
2291 gcc_assert (!stack_bytes);
2292 gcc_assert (mask & (1 << 29));
2294 last = 29;
2297 /* Note, it is possible to have gaps in the register mask.
2298 We ignore this here, and generate a JR anyway. We will
2299 be popping more registers than is strictly necessary, but
2300 it does save code space. */
2302 if (TARGET_LONG_CALLS)
2304 char name[40];
2306 if (first == last)
2307 sprintf (name, "__return_%s", reg_names [first]);
2308 else
2309 sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);
2311 sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2312 name, name);
2314 else
2316 if (first == last)
2317 sprintf (buff, "jr __return_%s", reg_names [first]);
2318 else
2319 sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
2322 return buff;
2326 /* Construct a JARL instruction to a routine that will perform the equivalent
2327 of the RTL passed as a parameter. This RTL is a function prologue that
2328 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2329 some stack space as well. The code has already verified that the RTL
2330 matches these requirements. */
2331 char *
2332 construct_save_jarl (rtx op)
2334 int count = XVECLEN (op, 0);
2335 int stack_bytes;
2336 unsigned long int mask;
2337 unsigned long int first;
2338 unsigned long int last;
2339 int i;
2340 static char buff [100]; /* XXX */
2342 if (count <= (TARGET_LONG_CALLS ? 3 : 2))
2344 error ("bogus JARL construction: %d", count);
2345 return NULL;
2348 /* Paranoia. */
2349 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2350 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2351 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
2352 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2354 /* Work out how many bytes to push onto the stack after storing the
2355 registers. */
2356 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2358 /* Each push will place 4 bytes onto the stack.... */
2359 stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;
2361 /* Make sure that the amount of stack space we are allocating is zero; that is all this routine handles. */
2362 if (stack_bytes != 0)
2364 error ("bad amount of stack space removal: %d", stack_bytes);
2365 return NULL;
2368 /* Now compute the bit mask of registers to push. */
2369 mask = 0;
2370 for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
2372 rtx vector_element = XVECEXP (op, 0, i);
2374 gcc_assert (GET_CODE (vector_element) == SET);
2375 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2376 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2377 SImode));
2379 mask |= 1 << REGNO (SET_SRC (vector_element));
2382 /* Scan for the first register to push. */
2383 for (first = 0; first < 32; first++)
2385 if (mask & (1 << first))
2386 break;
2389 gcc_assert (first < 32);
2391 /* Discover the last register to push. */
2392 if (mask & (1 << LINK_POINTER_REGNUM))
2394 last = LINK_POINTER_REGNUM;
2396 else
2398 gcc_assert (!stack_bytes);
2399 gcc_assert (mask & (1 << 29));
2401 last = 29;
2404 /* Note, it is possible to have gaps in the register mask.
2405 We ignore this here, and generate a JARL anyway. We will
2406 be pushing more registers than is strictly necessary, but
2407 it does save code space. */
2409 if (TARGET_LONG_CALLS)
2411 char name[40];
2413 if (first == last)
2414 sprintf (name, "__save_%s", reg_names [first]);
2415 else
2416 sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);
2418 sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2419 name, name);
2421 else
2423 if (first == last)
2424 sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
2425 else
2426 sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
2427 reg_names [last]);
2430 return buff;
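/* A note on the TARGET_LONG_CALLS sequence above (offsets assume the
   usual 4-byte movhi/movea/jarl and 2-byte add/jmp encodings): the
   movhi/movea pair builds the address of the __save_* routine in r11,
   "jarl .+4, r10" deposits the address of the following "add" in r10,
   and "add 4, r10" then advances it past the "jmp r11", so the
   __save_* routine returns to the instruction after the jump.  */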
2433 extern tree last_assemble_variable_decl;
2434 extern int size_directive_output;
2436 /* A version of asm_output_aligned_bss() that copes with the special
2437 data areas of the v850. */
2438 void
2439 v850_output_aligned_bss (FILE * file,
2440 tree decl,
2441 const char * name,
2442 unsigned HOST_WIDE_INT size,
2443 int align)
2445 switch (v850_get_data_area (decl))
2447 case DATA_AREA_ZDA:
2448 switch_to_section (zbss_section);
2449 break;
2451 case DATA_AREA_SDA:
2452 switch_to_section (sbss_section);
2453 break;
2455 case DATA_AREA_TDA:
2456 switch_to_section (tdata_section);
break; /* Without this, the TDA case would fall through and immediately switch to .bss.  */
2458 default:
2459 switch_to_section (bss_section);
2460 break;
2463 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2464 #ifdef ASM_DECLARE_OBJECT_NAME
2465 last_assemble_variable_decl = decl;
2466 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2467 #else
2468 /* Standard thing is just output label for the object. */
2469 ASM_OUTPUT_LABEL (file, name);
2470 #endif /* ASM_DECLARE_OBJECT_NAME */
2471 ASM_OUTPUT_SKIP (file, size ? size : 1);
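/* For example (illustrative): an uninitialized 4-byte object placed in
   the ZDA area ends up in .zbss, gets its label emitted as above, and
   is then followed by a 4-byte skip; a zero-sized object still
   reserves one byte because of the "size ? size : 1" above.  */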
2474 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2475 void
2476 v850_output_common (FILE * file,
2477 tree decl,
2478 const char * name,
2479 int size,
2480 int align)
2482 if (decl == NULL_TREE)
2484 fprintf (file, "%s", COMMON_ASM_OP);
2486 else
2488 switch (v850_get_data_area (decl))
2490 case DATA_AREA_ZDA:
2491 fprintf (file, "%s", ZCOMMON_ASM_OP);
2492 break;
2494 case DATA_AREA_SDA:
2495 fprintf (file, "%s", SCOMMON_ASM_OP);
2496 break;
2498 case DATA_AREA_TDA:
2499 fprintf (file, "%s", TCOMMON_ASM_OP);
2500 break;
2502 default:
2503 fprintf (file, "%s", COMMON_ASM_OP);
2504 break;
2508 assemble_name (file, name);
2509 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
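/* Illustrative output (the exact directive text comes from the
   *COMMON_ASM_OP macros, and the object name is made up): for a
   4-byte, word-aligned object named "counter" in the default data
   area this emits something like ".comm counter,4,4"; objects in the
   ZDA/SDA/TDA areas use the area-specific common directive instead.  */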
2512 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2513 void
2514 v850_output_local (FILE * file,
2515 tree decl,
2516 const char * name,
2517 int size,
2518 int align)
2520 fprintf (file, "%s", LOCAL_ASM_OP);
2521 assemble_name (file, name);
2522 fprintf (file, "\n");
2524 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2527 /* Add a data area to the given declaration if a GHS data area pragma is
2528 currently in effect (#pragma ghs startXXX/endXXX). */
2529 static void
2530 v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
2532 if (data_area_stack
2533 && data_area_stack->data_area
2534 && current_function_decl == NULL_TREE
2535 && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
2536 && v850_get_data_area (decl) == DATA_AREA_NORMAL)
2537 v850_set_data_area (decl, data_area_stack->data_area);
2539 /* Initialize the default names of the v850 specific sections,
2540 if this has not been done before. */
2542 if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
2544 GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
2545 = build_string (sizeof (".sdata")-1, ".sdata");
2547 GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
2548 = build_string (sizeof (".rosdata")-1, ".rosdata");
2550 GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
2551 = build_string (sizeof (".tdata")-1, ".tdata");
2553 GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
2554 = build_string (sizeof (".zdata")-1, ".zdata");
2556 GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
2557 = build_string (sizeof (".rozdata")-1, ".rozdata");
2560 if (current_function_decl == NULL_TREE
2561 && (TREE_CODE (decl) == VAR_DECL
2562 || TREE_CODE (decl) == CONST_DECL
2563 || TREE_CODE (decl) == FUNCTION_DECL)
2564 && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
2565 && !DECL_SECTION_NAME (decl))
2567 enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
2568 tree chosen_section;
2570 if (TREE_CODE (decl) == FUNCTION_DECL)
2571 kind = GHS_SECTION_KIND_TEXT;
2572 else
2574 /* First choose a section kind based on the data area of the decl. */
2575 switch (v850_get_data_area (decl))
2577 default:
2578 gcc_unreachable ();
2580 case DATA_AREA_SDA:
2581 kind = ((TREE_READONLY (decl))
2582 ? GHS_SECTION_KIND_ROSDATA
2583 : GHS_SECTION_KIND_SDATA);
2584 break;
2586 case DATA_AREA_TDA:
2587 kind = GHS_SECTION_KIND_TDATA;
2588 break;
2590 case DATA_AREA_ZDA:
2591 kind = ((TREE_READONLY (decl))
2592 ? GHS_SECTION_KIND_ROZDATA
2593 : GHS_SECTION_KIND_ZDATA);
2594 break;
2596 case DATA_AREA_NORMAL: /* default data area */
2597 if (TREE_READONLY (decl))
2598 kind = GHS_SECTION_KIND_RODATA;
2599 else if (DECL_INITIAL (decl))
2600 kind = GHS_SECTION_KIND_DATA;
2601 else
2602 kind = GHS_SECTION_KIND_BSS;
2606 /* Now, if the section kind has been explicitly renamed,
2607 then attach a section attribute. */
2608 chosen_section = GHS_current_section_names [(int) kind];
2610 /* Otherwise, if this kind of section needs an explicit section
2611 attribute, then also attach one. */
2612 if (chosen_section == NULL)
2613 chosen_section = GHS_default_section_names [(int) kind];
2615 if (chosen_section)
2617 /* Only set the section name if specified by a pragma, because
2618 otherwise it will force those variables to get allocated storage
2619 in this module, rather than by the linker. */
2620 DECL_SECTION_NAME (decl) = chosen_section;
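/* Usage sketch (the pragma spelling follows the startXXX/endXXX form
   noted above, shown here for the ZDA area; the variable name is
   illustrative):

     #pragma ghs startzda
     int zda_counter;          // placed in the ZDA data area
     #pragma ghs endzda

   File-scope declarations seen while such a pragma is active pick up
   the data area here and, unless a section was named explicitly, the
   matching default or renamed GHS section chosen above.  */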
2625 /* Construct a DISPOSE instruction that is the equivalent of
2626 the given RTX. We have already verified that this should
2627 be possible. */
2629 char *
2630 construct_dispose_instruction (rtx op)
2632 int count = XVECLEN (op, 0);
2633 int stack_bytes;
2634 unsigned long int mask;
2635 int i;
2636 static char buff[ 100 ]; /* XXX */
2637 int use_callt = 0;
2639 if (count <= 2)
2641 error ("bogus DISPOSE construction: %d", count);
2642 return NULL;
2645 /* Work out how many bytes to pop off the
2646 stack before retrieving registers. */
2647 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2648 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2649 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2651 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2653 /* Each pop will remove 4 bytes from the stack.... */
2654 stack_bytes -= (count - 2) * 4;
2656 /* Make sure that the amount we are popping
2657 will fit into the DISPOSE instruction. */
2658 if (stack_bytes > 128)
2660 error ("too much stack space to dispose of: %d", stack_bytes);
2661 return NULL;
2664 /* Now compute the bit mask of registers to pop. */
2665 mask = 0;
2667 for (i = 2; i < count; i++)
2669 rtx vector_element = XVECEXP (op, 0, i);
2671 gcc_assert (GET_CODE (vector_element) == SET);
2672 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2673 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2674 SImode));
2676 if (REGNO (SET_DEST (vector_element)) == 2)
2677 use_callt = 1;
2678 else
2679 mask |= 1 << REGNO (SET_DEST (vector_element));
2682 if (! TARGET_DISABLE_CALLT
2683 && (use_callt || stack_bytes == 0))
2685 if (use_callt)
2687 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
2688 return buff;
2690 else
2692 for (i = 20; i < 32; i++)
2693 if (mask & (1 << i))
2694 break;
2696 if (i == 31)
2697 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2698 else
2699 sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2700 i, (mask & (1 << 31)) ? "31c" : "29");
2703 else
2705 static char regs [100]; /* XXX */
2706 int done_one;
2708 /* Generate the DISPOSE instruction. Note we could just issue the
2709 bit mask as a number, since the assembler can cope with that, but for
2710 the sake of our readers we turn it into a textual description. */
2711 regs[0] = 0;
2712 done_one = 0;
2714 for (i = 20; i < 32; i++)
2716 if (mask & (1 << i))
2718 int first;
2720 if (done_one)
2721 strcat (regs, ", ");
2722 else
2723 done_one = 1;
2725 first = i;
2726 strcat (regs, reg_names[ first ]);
2728 for (i++; i < 32; i++)
2729 if ((mask & (1 << i)) == 0)
2730 break;
2732 if (i > first + 1)
2734 strcat (regs, " - ");
2735 strcat (regs, reg_names[ i - 1 ] );
2740 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2743 return buff;
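/* Worked example (illustrative): with 8 bytes of local stack and a
   mask covering r29 and r31, the non-callt branch above prints
   "dispose 2 {r29, r31}, r31" -- the first operand is stack_bytes / 4
   and the trailing r31 makes the DISPOSE also perform the return jump
   through the link pointer.  */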
2746 /* Construct a PREPARE instruction that is the equivalent of
2747 the given RTL. We have already verified that this should
2748 be possible. */
2750 char *
2751 construct_prepare_instruction (rtx op)
2753 int count;
2754 int stack_bytes;
2755 unsigned long int mask;
2756 int i;
2757 static char buff[ 100 ]; /* XXX */
2758 int use_callt = 0;
2760 if (XVECLEN (op, 0) <= 1)
2762 error ("bogus PREPEARE construction: %d", XVECLEN (op, 0));
2763 return NULL;
2766 /* Work out how many bytes to push onto
2767 the stack after storing the registers. */
2768 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2769 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2770 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2772 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2775 /* Make sure that the amount of stack space we are allocating
2776 will fit into the PREPARE instruction. */
2777 if (stack_bytes < -128)
2779 error ("too much stack space to prepare: %d", stack_bytes);
2780 return NULL;
2783 /* Now compute the bit mask of registers to push. */
2784 count = 0;
2785 mask = 0;
2786 for (i = 1; i < XVECLEN (op, 0); i++)
2788 rtx vector_element = XVECEXP (op, 0, i);
2790 if (GET_CODE (vector_element) == CLOBBER)
2791 continue;
2793 gcc_assert (GET_CODE (vector_element) == SET);
2794 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2795 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2796 SImode));
2798 if (REGNO (SET_SRC (vector_element)) == 2)
2799 use_callt = 1;
2800 else
2801 mask |= 1 << REGNO (SET_SRC (vector_element));
2802 count++;
2805 stack_bytes += count * 4;
2807 if ((! TARGET_DISABLE_CALLT)
2808 && (use_callt || stack_bytes == 0))
2810 if (use_callt)
2812 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2813 return buff;
2816 for (i = 20; i < 32; i++)
2817 if (mask & (1 << i))
2818 break;
2820 if (i == 31)
2821 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2822 else
2823 sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2824 i, (mask & (1 << 31)) ? "31c" : "29");
2826 else
2828 static char regs [100]; /* XXX */
2829 int done_one;
2832 /* Generate the PREPARE instruction. Note we could just issue the
2833 bit mask as a number, since the assembler can cope with that, but for
2834 the sake of our readers we turn it into a textual description. */
2835 regs[0] = 0;
2836 done_one = 0;
2838 for (i = 20; i < 32; i++)
2840 if (mask & (1 << i))
2842 int first;
2844 if (done_one)
2845 strcat (regs, ", ");
2846 else
2847 done_one = 1;
2849 first = i;
2850 strcat (regs, reg_names[ first ]);
2852 for (i++; i < 32; i++)
2853 if ((mask & (1 << i)) == 0)
2854 break;
2856 if (i > first + 1)
2858 strcat (regs, " - ");
2859 strcat (regs, reg_names[ i - 1 ] );
2864 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2867 return buff;
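/* Worked example (illustrative): if the RTL drops the stack pointer by
   32 bytes while storing r20, r21, r22 and r31, then stack_bytes starts
   at -32, the four stores raise it to -16, and the non-callt branch
   prints "prepare {r20 - r22, r31}, 4" -- four extra words of locals on
   top of the saved registers.  */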
2870 /* Return an RTX indicating where the return address to the
2871 calling function can be found. */
rtx
2874 v850_return_addr (int count)
2876 if (count != 0)
2877 return const0_rtx;
2879 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2882 /* Implement TARGET_ASM_INIT_SECTIONS. */
2884 static void
2885 v850_asm_init_sections (void)
2887 rosdata_section
2888 = get_unnamed_section (0, output_section_asm_op,
2889 "\t.section .rosdata,\"a\"");
2891 rozdata_section
2892 = get_unnamed_section (0, output_section_asm_op,
2893 "\t.section .rozdata,\"a\"");
2895 tdata_section
2896 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2897 "\t.section .tdata,\"aw\"");
2899 zdata_section
2900 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2901 "\t.section .zdata,\"aw\"");
2903 zbss_section
2904 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
2905 output_section_asm_op,
2906 "\t.section .zbss,\"aw\"");
2909 static section *
2910 v850_select_section (tree exp,
2911 int reloc ATTRIBUTE_UNUSED,
2912 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2914 if (TREE_CODE (exp) == VAR_DECL)
2916 int is_const;
2917 if (!TREE_READONLY (exp)
2918 || TREE_SIDE_EFFECTS (exp)
2919 || !DECL_INITIAL (exp)
2920 || (DECL_INITIAL (exp) != error_mark_node
2921 && !TREE_CONSTANT (DECL_INITIAL (exp))))
2922 is_const = FALSE;
2923 else
2924 is_const = TRUE;
2926 switch (v850_get_data_area (exp))
2928 case DATA_AREA_ZDA:
2929 return is_const ? rozdata_section : zdata_section;
2931 case DATA_AREA_TDA:
2932 return tdata_section;
2934 case DATA_AREA_SDA:
2935 return is_const ? rosdata_section : sdata_section;
2937 default:
2938 return is_const ? readonly_data_section : data_section;
2941 return readonly_data_section;
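/* Summary of the mapping above (illustrative): a constant-initialized,
   read-only VAR_DECL in the SDA area lands in .rosdata and a writable
   one in .sdata; the same split sends ZDA objects to .rozdata/.zdata,
   TDA objects always to .tdata, and everything else to the generic
   read-only data or data sections.  */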
2944 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
2946 static bool
2947 v850_function_value_regno_p (const unsigned int regno)
2949 return (regno == 10);
2952 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2954 static bool
2955 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2957 /* Values larger than 8 bytes, and BLKmode values, are returned in memory. */
2958 return int_size_in_bytes (type) > 8 || TYPE_MODE (type) == BLKmode;
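/* For instance, a 12-byte struct is returned through a memory slot,
   while an int comes back in the return register (r10, see
   v850_function_value below).  */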
2961 /* Worker function for TARGET_FUNCTION_VALUE. */
2963 static rtx
2964 v850_function_value (const_tree valtype,
2965 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
2966 bool outgoing ATTRIBUTE_UNUSED)
2968 return gen_rtx_REG (TYPE_MODE (valtype), 10);
2972 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
2974 static void
2975 v850_setup_incoming_varargs (cumulative_args_t ca,
2976 enum machine_mode mode ATTRIBUTE_UNUSED,
2977 tree type ATTRIBUTE_UNUSED,
2978 int *pretend_arg_size ATTRIBUTE_UNUSED,
2979 int second_time ATTRIBUTE_UNUSED)
2981 get_cumulative_args (ca)->anonymous_args = (!TARGET_GHS ? 1 : 0);
2984 /* Worker function for TARGET_CAN_ELIMINATE. */
2986 static bool
2987 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2989 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
2992 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
2994 If TARGET_APP_REGS is not enabled then r2 and r5 stay in the
2995 pool of fixed registers; otherwise release them. See PR 14505. */
2997 static void
2998 v850_conditional_register_usage (void)
3000 if (TARGET_APP_REGS)
3002 fixed_regs[2] = 0; call_used_regs[2] = 0;
3003 fixed_regs[5] = 0; call_used_regs[5] = 1;
3007 /* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE. */
3009 static void
3010 v850_asm_trampoline_template (FILE *f)
3012 fprintf (f, "\tjarl .+4,r12\n");
3013 fprintf (f, "\tld.w 12[r12],r20\n");
3014 fprintf (f, "\tld.w 16[r12],r12\n");
3015 fprintf (f, "\tjmp [r12]\n");
3016 fprintf (f, "\tnop\n");
3017 fprintf (f, "\t.long 0\n");
3018 fprintf (f, "\t.long 0\n");
3021 /* Worker function for TARGET_TRAMPOLINE_INIT. */
3023 static void
3024 v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
3026 rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);
3028 emit_block_move (m_tramp, assemble_trampoline_template (),
3029 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3031 mem = adjust_address (m_tramp, SImode, 16);
3032 emit_move_insn (mem, chain_value);
3033 mem = adjust_address (m_tramp, SImode, 20);
3034 emit_move_insn (mem, fnaddr);
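/* Layout sketch for the trampoline built from the template above
   (offsets assume 4-byte jarl/ld.w and 2-byte jmp/nop encodings):
   the jarl at offset 0 leaves the address of the first ld.w (offset 4)
   in r12, so 12[r12] and 16[r12] read the two .long slots at offsets
   16 and 20.  v850_trampoline_init stores the static chain at offset
   16 (loaded into r20) and the target function address at offset 20
   (loaded into r12 and jumped through).  */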
3037 static int
3038 v850_issue_rate (void)
3040 return (TARGET_V850E2_ALL ? 2 : 1);
3043 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3045 static bool
3046 v850_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
3048 return (GET_CODE (x) == CONST_DOUBLE
3049 || !(GET_CODE (x) == CONST
3050 && GET_CODE (XEXP (x, 0)) == PLUS
3051 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
3052 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3053 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x, 0), 1)))));
3056 static int
3057 v850_memory_move_cost (enum machine_mode mode,
3058 reg_class_t reg_class ATTRIBUTE_UNUSED,
3059 bool in)
3061 switch (GET_MODE_SIZE (mode))
3063 case 0:
3064 return in ? 24 : 8;
3065 case 1:
3066 case 2:
3067 case 3:
3068 case 4:
3069 return in ? 6 : 2;
3070 default:
3071 return (GET_MODE_SIZE (mode) / 2) * (in ? 3 : 1);
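/* Worked numbers (illustrative): a 4-byte SImode access costs 6 to
   load and 2 to store; an 8-byte DImode access falls into the default
   case and costs (8 / 2) * 3 = 12 to load and (8 / 2) * 1 = 4 to
   store.  */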
3075 /* V850 specific attributes. */
3077 static const struct attribute_spec v850_attribute_table[] =
3079 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
3080 affects_type_identity } */
3081 { "interrupt_handler", 0, 0, true, false, false,
3082 v850_handle_interrupt_attribute, false },
3083 { "interrupt", 0, 0, true, false, false,
3084 v850_handle_interrupt_attribute, false },
3085 { "sda", 0, 0, true, false, false,
3086 v850_handle_data_area_attribute, false },
3087 { "tda", 0, 0, true, false, false,
3088 v850_handle_data_area_attribute, false },
3089 { "zda", 0, 0, true, false, false,
3090 v850_handle_data_area_attribute, false },
3091 { NULL, 0, 0, false, false, false, NULL, false }
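/* Usage sketch for the attributes declared above (declaration names
   are illustrative):

     int small_var __attribute__ ((sda));    // place in the SDA area
     int tiny_var  __attribute__ ((zda));    // place in the ZDA area
     void handler (void) __attribute__ ((interrupt_handler));

   All five entries are decl attributes taking no arguments, as the
   0/0 min/max lengths and the decl_req flags above indicate.  */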
3094 /* Initialize the GCC target structure. */
3096 #undef TARGET_MEMORY_MOVE_COST
3097 #define TARGET_MEMORY_MOVE_COST v850_memory_move_cost
3099 #undef TARGET_ASM_ALIGNED_HI_OP
3100 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
3102 #undef TARGET_PRINT_OPERAND
3103 #define TARGET_PRINT_OPERAND v850_print_operand
3104 #undef TARGET_PRINT_OPERAND_ADDRESS
3105 #define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
3106 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
3107 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p
3109 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
3110 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra
3112 #undef TARGET_ATTRIBUTE_TABLE
3113 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
3115 #undef TARGET_INSERT_ATTRIBUTES
3116 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
3118 #undef TARGET_ASM_SELECT_SECTION
3119 #define TARGET_ASM_SELECT_SECTION v850_select_section
3121 /* The assembler supports switchable .bss sections, but
3122 v850_select_section doesn't yet make use of them. */
3123 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
3124 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
3126 #undef TARGET_ENCODE_SECTION_INFO
3127 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
3129 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
3130 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
3132 #undef TARGET_RTX_COSTS
3133 #define TARGET_RTX_COSTS v850_rtx_costs
3135 #undef TARGET_ADDRESS_COST
3136 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
3138 #undef TARGET_MACHINE_DEPENDENT_REORG
3139 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
3141 #undef TARGET_SCHED_ISSUE_RATE
3142 #define TARGET_SCHED_ISSUE_RATE v850_issue_rate
3144 #undef TARGET_FUNCTION_VALUE_REGNO_P
3145 #define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
3146 #undef TARGET_FUNCTION_VALUE
3147 #define TARGET_FUNCTION_VALUE v850_function_value
3149 #undef TARGET_PROMOTE_PROTOTYPES
3150 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3152 #undef TARGET_RETURN_IN_MEMORY
3153 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
3155 #undef TARGET_PASS_BY_REFERENCE
3156 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
3158 #undef TARGET_CALLEE_COPIES
3159 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
3161 #undef TARGET_SETUP_INCOMING_VARARGS
3162 #define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs
3164 #undef TARGET_ARG_PARTIAL_BYTES
3165 #define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes
3167 #undef TARGET_FUNCTION_ARG
3168 #define TARGET_FUNCTION_ARG v850_function_arg
3170 #undef TARGET_FUNCTION_ARG_ADVANCE
3171 #define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance
3173 #undef TARGET_CAN_ELIMINATE
3174 #define TARGET_CAN_ELIMINATE v850_can_eliminate
3176 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3177 #define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage
3179 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3180 #define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
3181 #undef TARGET_TRAMPOLINE_INIT
3182 #define TARGET_TRAMPOLINE_INIT v850_trampoline_init
3184 #undef TARGET_STRICT_ARGUMENT_NAMING
3185 #define TARGET_STRICT_ARGUMENT_NAMING v850_strict_argument_naming
3187 #undef TARGET_LEGITIMATE_CONSTANT_P
3188 #define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p
3190 struct gcc_target targetm = TARGET_INITIALIZER;
3192 #include "gt-v850.h"