1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996-2017 Free Software Foundation, Inc.
3 Contributed by Jeff Law (law@cygnus.com).
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "df.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "stringpool.h"
32 #include "attribs.h"
33 #include "insn-config.h"
34 #include "regs.h"
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "diagnostic-core.h"
38 #include "stor-layout.h"
39 #include "varasm.h"
40 #include "calls.h"
41 #include "conditions.h"
42 #include "output.h"
43 #include "insn-attr.h"
44 #include "expr.h"
45 #include "cfgrtl.h"
46 #include "builtins.h"
48 /* This file should be included last. */
49 #include "target-def.h"
51 #ifndef streq
52 #define streq(a,b) (strcmp (a, b) == 0)
53 #endif
55 static void v850_print_operand_address (FILE *, machine_mode, rtx);
57 /* Names of the various data areas used on the v850. */
58 const char * GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
59 const char * GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
61 /* Track the current data area set by the data area pragma (which
62 can be nested). Tested by check_default_data_area. */
63 data_area_stack_element * data_area_stack = NULL;
 65 /* True if we no longer need to check whether the current
 66    function is an interrupt handler.  */
67 static int v850_interrupt_cache_p = FALSE;
69 rtx v850_compare_op0, v850_compare_op1;
71 /* Whether current function is an interrupt handler. */
72 static int v850_interrupt_p = FALSE;
74 static GTY(()) section * rosdata_section;
75 static GTY(()) section * rozdata_section;
76 static GTY(()) section * tdata_section;
77 static GTY(()) section * zdata_section;
78 static GTY(()) section * zbss_section;
80 /* We use this to wrap all emitted insns in the prologue. */
81 static rtx
82 F (rtx x)
84 if (GET_CODE (x) != CLOBBER)
85 RTX_FRAME_RELATED_P (x) = 1;
86 return x;
89 /* Mark all the subexpressions of the PARALLEL rtx PAR as
90 frame-related. Return PAR.
92 dwarf2out.c:dwarf2out_frame_debug_expr ignores sub-expressions of a
93 PARALLEL rtx other than the first if they do not have the
94 FRAME_RELATED flag set on them. */
96 static rtx
97 v850_all_frame_related (rtx par)
99 int len = XVECLEN (par, 0);
100 int i;
102 gcc_assert (GET_CODE (par) == PARALLEL);
103 for (i = 0; i < len; i++)
104 F (XVECEXP (par, 0, i));
106 return par;
109 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
110 Specify whether to pass the argument by reference. */
112 static bool
113 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
114 machine_mode mode, const_tree type,
115 bool named ATTRIBUTE_UNUSED)
117 unsigned HOST_WIDE_INT size;
119 if (!TARGET_GCC_ABI)
120 return 0;
122 if (type)
123 size = int_size_in_bytes (type);
124 else
125 size = GET_MODE_SIZE (mode);
127 return size > 8;
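/* Editor's note -- illustration only, not part of the original source:
   under the GCC ABI (TARGET_GCC_ABI) an aggregate wider than 8 bytes, e.g.

       struct big { int a, b, c; };     (12 bytes)

   makes v850_pass_by_reference return true, so the caller passes the
   address of a copy instead of the value.  With the RH850 ABI
   (!TARGET_GCC_ABI) the hook always returns false and aggregates are
   passed by value.  */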
130 /* Return an RTX to represent where an argument with mode MODE
131 and type TYPE will be passed to a function. If the result
132 is NULL_RTX, the argument will be pushed. */
134 static rtx
135 v850_function_arg (cumulative_args_t cum_v, machine_mode mode,
136 const_tree type, bool named)
138 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
139 rtx result = NULL_RTX;
140 int size, align;
142 if (!named)
143 return NULL_RTX;
145 if (mode == BLKmode)
146 size = int_size_in_bytes (type);
147 else
148 size = GET_MODE_SIZE (mode);
150 size = (size + UNITS_PER_WORD -1) & ~(UNITS_PER_WORD -1);
152 if (size < 1)
154 /* Once we have stopped using argument registers, do not start up again. */
155 cum->nbytes = 4 * UNITS_PER_WORD;
156 return NULL_RTX;
159 if (!TARGET_GCC_ABI)
160 align = UNITS_PER_WORD;
161 else if (size <= UNITS_PER_WORD && type)
162 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
163 else
164 align = size;
166 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
168 if (cum->nbytes > 4 * UNITS_PER_WORD)
169 return NULL_RTX;
171 if (type == NULL_TREE
172 && cum->nbytes + size > 4 * UNITS_PER_WORD)
173 return NULL_RTX;
175 switch (cum->nbytes / UNITS_PER_WORD)
177 case 0:
178 result = gen_rtx_REG (mode, 6);
179 break;
180 case 1:
181 result = gen_rtx_REG (mode, 7);
182 break;
183 case 2:
184 result = gen_rtx_REG (mode, 8);
185 break;
186 case 3:
187 result = gen_rtx_REG (mode, 9);
188 break;
189 default:
190 result = NULL_RTX;
193 return result;
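/* Editor's note -- illustration only, not part of the original source:
   the first 16 bytes of named arguments are passed in r6-r9, selected by
   cum->nbytes / UNITS_PER_WORD above.  For a call such as f (a, b, c)
   with three int arguments, a, b and c land in r6, r7 and r8; once
   cum->nbytes exceeds 16 (or a zero-sized argument is seen) the remaining
   arguments go on the stack (NULL_RTX is returned).  */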
196 /* Return the number of bytes which must be put into registers
 197    for values which are partly in registers and partly in memory.  */
198 static int
199 v850_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
200 tree type, bool named)
202 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
203 int size, align;
205 if (!named)
206 return 0;
208 if (mode == BLKmode)
209 size = int_size_in_bytes (type);
210 else
211 size = GET_MODE_SIZE (mode);
213 if (size < 1)
214 size = 1;
216 if (!TARGET_GCC_ABI)
217 align = UNITS_PER_WORD;
218 else if (type)
219 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
220 else
221 align = size;
223 cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);
225 if (cum->nbytes > 4 * UNITS_PER_WORD)
226 return 0;
228 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
229 return 0;
231 if (type == NULL_TREE
232 && cum->nbytes + size > 4 * UNITS_PER_WORD)
233 return 0;
235 return 4 * UNITS_PER_WORD - cum->nbytes;
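/* Editor's note -- illustration only, not part of the original source:
   this handles the "split" argument case.  If 12 bytes of arguments are
   already assigned to r6-r8 and the next named argument is 8 bytes wide,
   the function returns 16 - 12 = 4: the first word of that argument is
   passed in r9 and the second word is passed on the stack.  */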
238 /* Update the data in CUM to advance over an argument
239 of mode MODE and data type TYPE.
240 (TYPE is null for libcalls where that information may not be available.) */
242 static void
243 v850_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
244 const_tree type, bool named ATTRIBUTE_UNUSED)
246 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
248 if (!TARGET_GCC_ABI)
249 cum->nbytes += (((mode != BLKmode
250 ? GET_MODE_SIZE (mode)
251 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1)
252 & -UNITS_PER_WORD);
253 else
254 cum->nbytes += (((type && int_size_in_bytes (type) > 8
255 ? GET_MODE_SIZE (Pmode)
256 : (mode != BLKmode
257 ? GET_MODE_SIZE (mode)
258 : int_size_in_bytes (type))) + UNITS_PER_WORD - 1)
259 & -UNITS_PER_WORD);
262 /* Return the high and low words of a CONST_DOUBLE */
264 static void
265 const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
267 if (GET_CODE (x) == CONST_DOUBLE)
269 long t[2];
271 switch (GET_MODE (x))
273 case E_DFmode:
274 REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (x), t);
275 *p_high = t[1]; /* since v850 is little endian */
276 *p_low = t[0]; /* high is second word */
277 return;
279 case E_SFmode:
280 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), *p_high);
281 *p_low = 0;
282 return;
284 case E_VOIDmode:
285 case E_DImode:
286 *p_high = CONST_DOUBLE_HIGH (x);
287 *p_low = CONST_DOUBLE_LOW (x);
288 return;
290 default:
291 break;
295 fatal_insn ("const_double_split got a bad insn:", x);
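/* Editor's note -- illustration only, not part of the original source:
   for a DFmode constant such as 1.0 (0x3ff0000000000000),
   REAL_VALUE_TO_TARGET_DOUBLE fills t[0] with the low word and t[1] with
   the high word in the target's little-endian layout, so the code above
   returns *p_high == 0x3ff00000 and *p_low == 0.  */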
299 /* Return the cost of the rtx R with code CODE. */
301 static int
302 const_costs_int (HOST_WIDE_INT value, int zero_cost)
304 if (CONST_OK_FOR_I (value))
305 return zero_cost;
306 else if (CONST_OK_FOR_J (value))
307 return 1;
308 else if (CONST_OK_FOR_K (value))
309 return 2;
310 else
311 return 4;
314 static int
315 const_costs (rtx r, enum rtx_code c)
317 HOST_WIDE_INT high, low;
319 switch (c)
321 case CONST_INT:
322 return const_costs_int (INTVAL (r), 0);
324 case CONST_DOUBLE:
325 const_double_split (r, &high, &low);
326 if (GET_MODE (r) == SFmode)
327 return const_costs_int (high, 1);
328 else
329 return const_costs_int (high, 1) + const_costs_int (low, 1);
331 case SYMBOL_REF:
332 case LABEL_REF:
333 case CONST:
334 return 2;
336 case HIGH:
337 return 1;
339 default:
340 return 4;
344 static bool
345 v850_rtx_costs (rtx x, machine_mode mode, int outer_code,
346 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
348 enum rtx_code code = GET_CODE (x);
350 switch (code)
352 case CONST_INT:
353 case CONST_DOUBLE:
354 case CONST:
355 case SYMBOL_REF:
356 case LABEL_REF:
357 *total = COSTS_N_INSNS (const_costs (x, code));
358 return true;
360 case MOD:
361 case DIV:
362 case UMOD:
363 case UDIV:
364 if (TARGET_V850E && !speed)
365 *total = 6;
366 else
367 *total = 60;
368 return true;
370 case MULT:
371 if (TARGET_V850E
372 && (mode == SImode || mode == HImode || mode == QImode))
374 if (GET_CODE (XEXP (x, 1)) == REG)
375 *total = 4;
376 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
378 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
379 *total = 6;
380 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
381 *total = 10;
384 else
385 *total = 20;
386 return true;
388 case ZERO_EXTRACT:
389 if (outer_code == COMPARE)
390 *total = 0;
391 return false;
393 default:
394 return false;
398 /* Print operand X using operand code CODE to assembly language output file
399 FILE. */
401 static void
402 v850_print_operand (FILE * file, rtx x, int code)
404 HOST_WIDE_INT high, low;
406 switch (code)
408 case 'c':
409 /* We use 'c' operands with symbols for .vtinherit. */
410 if (GET_CODE (x) == SYMBOL_REF)
412 output_addr_const(file, x);
413 break;
415 /* Fall through. */
416 case 'b':
417 case 'B':
418 case 'C':
419 switch ((code == 'B' || code == 'C')
420 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
422 case NE:
423 if (code == 'c' || code == 'C')
424 fprintf (file, "nz");
425 else
426 fprintf (file, "ne");
427 break;
428 case EQ:
429 if (code == 'c' || code == 'C')
430 fprintf (file, "z");
431 else
432 fprintf (file, "e");
433 break;
434 case GE:
435 fprintf (file, "ge");
436 break;
437 case GT:
438 fprintf (file, "gt");
439 break;
440 case LE:
441 fprintf (file, "le");
442 break;
443 case LT:
444 fprintf (file, "lt");
445 break;
446 case GEU:
447 fprintf (file, "nl");
448 break;
449 case GTU:
450 fprintf (file, "h");
451 break;
452 case LEU:
453 fprintf (file, "nh");
454 break;
455 case LTU:
456 fprintf (file, "l");
457 break;
458 default:
459 gcc_unreachable ();
461 break;
462 case 'F': /* High word of CONST_DOUBLE. */
463 switch (GET_CODE (x))
465 case CONST_INT:
466 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
467 break;
469 case CONST_DOUBLE:
470 const_double_split (x, &high, &low);
471 fprintf (file, "%ld", (long) high);
472 break;
474 default:
475 gcc_unreachable ();
477 break;
478 case 'G': /* Low word of CONST_DOUBLE. */
479 switch (GET_CODE (x))
481 case CONST_INT:
482 fprintf (file, "%ld", (long) INTVAL (x));
483 break;
485 case CONST_DOUBLE:
486 const_double_split (x, &high, &low);
487 fprintf (file, "%ld", (long) low);
488 break;
490 default:
491 gcc_unreachable ();
493 break;
494 case 'L':
495 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
496 break;
497 case 'M':
498 fprintf (file, "%d", exact_log2 (INTVAL (x)));
499 break;
500 case 'O':
501 gcc_assert (special_symbolref_operand (x, VOIDmode));
503 if (GET_CODE (x) == CONST)
504 x = XEXP (XEXP (x, 0), 0);
505 else
506 gcc_assert (GET_CODE (x) == SYMBOL_REF);
508 if (SYMBOL_REF_ZDA_P (x))
509 fprintf (file, "zdaoff");
510 else if (SYMBOL_REF_SDA_P (x))
511 fprintf (file, "sdaoff");
512 else if (SYMBOL_REF_TDA_P (x))
513 fprintf (file, "tdaoff");
514 else
515 gcc_unreachable ();
516 break;
517 case 'P':
518 gcc_assert (special_symbolref_operand (x, VOIDmode));
519 output_addr_const (file, x);
520 break;
521 case 'Q':
522 gcc_assert (special_symbolref_operand (x, VOIDmode));
524 if (GET_CODE (x) == CONST)
525 x = XEXP (XEXP (x, 0), 0);
526 else
527 gcc_assert (GET_CODE (x) == SYMBOL_REF);
529 if (SYMBOL_REF_ZDA_P (x))
530 fprintf (file, "r0");
531 else if (SYMBOL_REF_SDA_P (x))
532 fprintf (file, "gp");
533 else if (SYMBOL_REF_TDA_P (x))
534 fprintf (file, "ep");
535 else
536 gcc_unreachable ();
537 break;
538 case 'R': /* 2nd word of a double. */
539 switch (GET_CODE (x))
541 case REG:
542 fprintf (file, reg_names[REGNO (x) + 1]);
543 break;
544 case MEM:
546 machine_mode mode = GET_MODE (x);
547 x = XEXP (adjust_address (x, SImode, 4), 0);
548 v850_print_operand_address (file, mode, x);
549 if (GET_CODE (x) == CONST_INT)
550 fprintf (file, "[r0]");
552 break;
554 case CONST_INT:
556 unsigned HOST_WIDE_INT v = INTVAL (x);
558 /* Trickery to avoid problems with shifting
559 32-bits at a time on a 32-bit host. */
560 v = v >> 16;
561 v = v >> 16;
562 fprintf (file, HOST_WIDE_INT_PRINT_HEX, v);
563 break;
566 case CONST_DOUBLE:
567 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_HIGH (x));
568 break;
570 default:
571 debug_rtx (x);
572 gcc_unreachable ();
574 break;
575 case 'S':
577 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
578 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
579 fputs ("s", file);
581 break;
583 case 'T':
585 /* Like an 'S' operand above, but for unsigned loads only. */
586 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
587 fputs ("s", file);
589 break;
591 case 'W': /* Print the instruction suffix. */
592 switch (GET_MODE (x))
594 default:
595 gcc_unreachable ();
597 case E_QImode: fputs (".b", file); break;
598 case E_HImode: fputs (".h", file); break;
599 case E_SImode: fputs (".w", file); break;
600 case E_SFmode: fputs (".w", file); break;
602 break;
603 case '.': /* Register r0. */
604 fputs (reg_names[0], file);
605 break;
606 case 'z': /* Reg or zero. */
607 if (REG_P (x))
608 fputs (reg_names[REGNO (x)], file);
609 else if ((GET_MODE(x) == SImode
610 || GET_MODE(x) == DFmode
611 || GET_MODE(x) == SFmode)
612 && x == CONST0_RTX(GET_MODE(x)))
613 fputs (reg_names[0], file);
614 else
616 gcc_assert (x == const0_rtx);
617 fputs (reg_names[0], file);
619 break;
620 default:
621 switch (GET_CODE (x))
623 case MEM:
624 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
625 output_address (GET_MODE (x),
626 gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
627 XEXP (x, 0)));
628 else
629 output_address (GET_MODE (x), XEXP (x, 0));
630 break;
632 case REG:
633 fputs (reg_names[REGNO (x)], file);
634 break;
635 case SUBREG:
636 fputs (reg_names[subreg_regno (x)], file);
637 break;
638 case CONST_DOUBLE:
639 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
640 break;
642 case CONST_INT:
643 case SYMBOL_REF:
644 case CONST:
645 case LABEL_REF:
646 case CODE_LABEL:
647 v850_print_operand_address (file, VOIDmode, x);
648 break;
649 default:
650 gcc_unreachable ();
652 break;
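/* Editor's note -- illustration only, not part of the original source:
   these operand codes are used by the output templates in this file and
   in v850.md.  For example, output_move_single's template
   "%S1ld%W1 %1,%0" prints an "s" prefix (%S1) when operand 1 is a short
   EP memory reference and a width suffix (%W1), so a word load from a
   TDA variable becomes "sld.w ..." while an ordinary load is "ld.w ...".  */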
 658 /* Output the assembly language form of the address ADDR to FILE.  */
660 static void
661 v850_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
663 switch (GET_CODE (addr))
665 case REG:
666 fprintf (file, "0[");
667 v850_print_operand (file, addr, 0);
668 fprintf (file, "]");
669 break;
670 case LO_SUM:
671 if (GET_CODE (XEXP (addr, 0)) == REG)
673 /* reg,foo */
674 fprintf (file, "lo(");
675 v850_print_operand (file, XEXP (addr, 1), 0);
676 fprintf (file, ")[");
677 v850_print_operand (file, XEXP (addr, 0), 0);
678 fprintf (file, "]");
680 break;
681 case PLUS:
682 if (GET_CODE (XEXP (addr, 0)) == REG
683 || GET_CODE (XEXP (addr, 0)) == SUBREG)
685 /* reg,foo */
686 v850_print_operand (file, XEXP (addr, 1), 0);
687 fprintf (file, "[");
688 v850_print_operand (file, XEXP (addr, 0), 0);
689 fprintf (file, "]");
691 else
693 v850_print_operand (file, XEXP (addr, 0), 0);
694 fprintf (file, "+");
695 v850_print_operand (file, XEXP (addr, 1), 0);
697 break;
698 case SYMBOL_REF:
700 const char *off_name = NULL;
701 const char *reg_name = NULL;
703 if (SYMBOL_REF_ZDA_P (addr))
705 off_name = "zdaoff";
706 reg_name = "r0";
708 else if (SYMBOL_REF_SDA_P (addr))
710 off_name = "sdaoff";
711 reg_name = "gp";
713 else if (SYMBOL_REF_TDA_P (addr))
715 off_name = "tdaoff";
716 reg_name = "ep";
719 if (off_name)
720 fprintf (file, "%s(", off_name);
721 output_addr_const (file, addr);
722 if (reg_name)
723 fprintf (file, ")[%s]", reg_name);
725 break;
726 case CONST:
727 if (special_symbolref_operand (addr, VOIDmode))
729 rtx x = XEXP (XEXP (addr, 0), 0);
730 const char *off_name;
731 const char *reg_name;
733 if (SYMBOL_REF_ZDA_P (x))
735 off_name = "zdaoff";
736 reg_name = "r0";
738 else if (SYMBOL_REF_SDA_P (x))
740 off_name = "sdaoff";
741 reg_name = "gp";
743 else if (SYMBOL_REF_TDA_P (x))
745 off_name = "tdaoff";
746 reg_name = "ep";
748 else
749 gcc_unreachable ();
751 fprintf (file, "%s(", off_name);
752 output_addr_const (file, addr);
753 fprintf (file, ")[%s]", reg_name);
755 else
756 output_addr_const (file, addr);
757 break;
758 default:
759 output_addr_const (file, addr);
760 break;
764 static bool
765 v850_print_operand_punct_valid_p (unsigned char code)
767 return code == '.';
770 /* When assemble_integer is used to emit the offsets for a switch
771 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
772 output_addr_const will normally barf at this, but it is OK to omit
773 the truncate and just emit the difference of the two labels. The
774 .hword directive will automatically handle the truncation for us.
776 Returns true if rtx was handled, false otherwise. */
778 static bool
779 v850_output_addr_const_extra (FILE * file, rtx x)
781 if (GET_CODE (x) != TRUNCATE)
782 return false;
784 x = XEXP (x, 0);
786 /* We must also handle the case where the switch table was passed a
787 constant value and so has been collapsed. In this case the first
788 label will have been deleted. In such a case it is OK to emit
789 nothing, since the table will not be used.
790 (cf gcc.c-torture/compile/990801-1.c). */
791 if (GET_CODE (x) == MINUS
792 && GET_CODE (XEXP (x, 0)) == LABEL_REF)
794 rtx_code_label *label
795 = dyn_cast<rtx_code_label *> (XEXP (XEXP (x, 0), 0));
796 if (label && label->deleted ())
797 return true;
800 output_addr_const (file, x);
801 return true;
 804 /* Return appropriate code to load up a 1-, 2-, or 4-byte integer or
 805    floating point value.  */
807 const char *
808 output_move_single (rtx * operands)
810 rtx dst = operands[0];
811 rtx src = operands[1];
813 if (REG_P (dst))
815 if (REG_P (src))
816 return "mov %1,%0";
818 else if (GET_CODE (src) == CONST_INT)
820 HOST_WIDE_INT value = INTVAL (src);
822 if (CONST_OK_FOR_J (value)) /* Signed 5-bit immediate. */
823 return "mov %1,%0";
825 else if (CONST_OK_FOR_K (value)) /* Signed 16-bit immediate. */
826 return "movea %1,%.,%0";
828 else if (CONST_OK_FOR_L (value)) /* Upper 16 bits were set. */
829 return "movhi hi0(%1),%.,%0";
831 /* A random constant. */
832 else if (TARGET_V850E_UP)
833 return "mov %1,%0";
834 else
835 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
838 else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
840 HOST_WIDE_INT high, low;
842 const_double_split (src, &high, &low);
844 if (CONST_OK_FOR_J (high)) /* Signed 5-bit immediate. */
845 return "mov %F1,%0";
847 else if (CONST_OK_FOR_K (high)) /* Signed 16-bit immediate. */
848 return "movea %F1,%.,%0";
850 else if (CONST_OK_FOR_L (high)) /* Upper 16 bits were set. */
851 return "movhi hi0(%F1),%.,%0";
853 /* A random constant. */
854 else if (TARGET_V850E_UP)
855 return "mov %F1,%0";
857 else
858 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
861 else if (GET_CODE (src) == MEM)
862 return "%S1ld%W1 %1,%0";
864 else if (special_symbolref_operand (src, VOIDmode))
865 return "movea %O1(%P1),%Q1,%0";
867 else if (GET_CODE (src) == LABEL_REF
868 || GET_CODE (src) == SYMBOL_REF
869 || GET_CODE (src) == CONST)
871 if (TARGET_V850E_UP)
872 return "mov hilo(%1),%0";
873 else
874 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
877 else if (GET_CODE (src) == HIGH)
878 return "movhi hi(%1),%.,%0";
880 else if (GET_CODE (src) == LO_SUM)
882 operands[2] = XEXP (src, 0);
883 operands[3] = XEXP (src, 1);
884 return "movea lo(%3),%2,%0";
888 else if (GET_CODE (dst) == MEM)
890 if (REG_P (src))
891 return "%S0st%W0 %1,%0";
893 else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
894 return "%S0st%W0 %.,%0";
896 else if (GET_CODE (src) == CONST_DOUBLE
897 && CONST0_RTX (GET_MODE (dst)) == src)
898 return "%S0st%W0 %.,%0";
901 fatal_insn ("output_move_single:", gen_rtx_SET (dst, src));
902 return "";
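/* Editor's note -- illustration only, not part of the original source:
   on a plain V850 (no V850E 32-bit "mov imm32,reg"), a constant that fits
   neither the 5-bit nor the 16-bit immediate forms is built in two steps.
   Loading 74565 (0x12345) into, say, r10 expands from the templates above
   to roughly:

       movhi hi(74565),r0,r10
       movea lo(74565),r10,r10

   (r10 is used here purely for illustration).  */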
905 machine_mode
906 v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1 ATTRIBUTE_UNUSED)
908 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
910 switch (cond)
912 case LE:
913 return CC_FPU_LEmode;
914 case GE:
915 return CC_FPU_GEmode;
916 case LT:
917 return CC_FPU_LTmode;
918 case GT:
919 return CC_FPU_GTmode;
920 case EQ:
921 return CC_FPU_EQmode;
922 case NE:
923 return CC_FPU_NEmode;
924 default:
925 gcc_unreachable ();
928 return CCmode;
931 machine_mode
932 v850_gen_float_compare (enum rtx_code cond, machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
934 if (GET_MODE (op0) == DFmode)
936 switch (cond)
938 case LE:
939 emit_insn (gen_cmpdf_le_insn (op0, op1));
940 break;
941 case GE:
942 emit_insn (gen_cmpdf_ge_insn (op0, op1));
943 break;
944 case LT:
945 emit_insn (gen_cmpdf_lt_insn (op0, op1));
946 break;
947 case GT:
948 emit_insn (gen_cmpdf_gt_insn (op0, op1));
949 break;
950 case NE:
951 /* Note: There is no NE comparison operator. So we
952 perform an EQ comparison and invert the branch.
953 See v850_float_nz_comparison for how this is done. */
954 case EQ:
955 emit_insn (gen_cmpdf_eq_insn (op0, op1));
956 break;
957 default:
958 gcc_unreachable ();
961 else if (GET_MODE (v850_compare_op0) == SFmode)
963 switch (cond)
965 case LE:
966 emit_insn (gen_cmpsf_le_insn(op0, op1));
967 break;
968 case GE:
969 emit_insn (gen_cmpsf_ge_insn(op0, op1));
970 break;
971 case LT:
972 emit_insn (gen_cmpsf_lt_insn(op0, op1));
973 break;
974 case GT:
975 emit_insn (gen_cmpsf_gt_insn(op0, op1));
976 break;
977 case NE:
978 /* Note: There is no NE comparison operator. So we
979 perform an EQ comparison and invert the branch.
980 See v850_float_nz_comparison for how this is done. */
981 case EQ:
982 emit_insn (gen_cmpsf_eq_insn(op0, op1));
983 break;
984 default:
985 gcc_unreachable ();
988 else
989 gcc_unreachable ();
991 return v850_select_cc_mode (cond, op0, op1);
 994 rtx
 995 v850_gen_compare (enum rtx_code cond, machine_mode mode, rtx op0, rtx op1)
997 if (GET_MODE_CLASS(GET_MODE (op0)) != MODE_FLOAT)
999 emit_insn (gen_cmpsi_insn (op0, op1));
1000 return gen_rtx_fmt_ee (cond, mode, gen_rtx_REG(CCmode, CC_REGNUM), const0_rtx);
1002 else
1004 rtx cc_reg;
1005 mode = v850_gen_float_compare (cond, mode, op0, op1);
1006 cc_reg = gen_rtx_REG (mode, CC_REGNUM);
1007 emit_insn (gen_rtx_SET (cc_reg, gen_rtx_REG (mode, FCC_REGNUM)));
1009 return gen_rtx_fmt_ee (cond, mode, cc_reg, const0_rtx);
1013 /* Return maximum offset supported for a short EP memory reference of mode
1014 MODE and signedness UNSIGNEDP. */
1016 static int
1017 ep_memory_offset (machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
1019 int max_offset = 0;
1021 switch (mode)
1023 case E_QImode:
1024 if (TARGET_SMALL_SLD)
1025 max_offset = (1 << 4);
1026 else if ((TARGET_V850E_UP)
1027 && unsignedp)
1028 max_offset = (1 << 4);
1029 else
1030 max_offset = (1 << 7);
1031 break;
1033 case E_HImode:
1034 if (TARGET_SMALL_SLD)
1035 max_offset = (1 << 5);
1036 else if ((TARGET_V850E_UP)
1037 && unsignedp)
1038 max_offset = (1 << 5);
1039 else
1040 max_offset = (1 << 8);
1041 break;
1043 case E_SImode:
1044 case E_SFmode:
1045 max_offset = (1 << 8);
1046 break;
1048 default:
1049 break;
1052 return max_offset;
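/* Editor's note -- illustration only, not part of the original source:
   these limits follow the displacement ranges of the short sld/sst forms.
   For HImode, a signed load can use offsets below 1 << 8 (256 bytes),
   but with -msmall-sld, or for an unsigned V850E load, only offsets below
   1 << 5 (32 bytes) are accepted.  */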
1055 /* Return true if OP is a valid short EP memory reference.  */
1057 int
1058 ep_memory_operand (rtx op, machine_mode mode, int unsigned_load)
1060 rtx addr, op0, op1;
1061 int max_offset;
1062 int mask;
1064 /* If we are not using the EP register on a per-function basis
1065 then do not allow this optimization at all. This is to
1066 prevent the use of the SLD/SST instructions which cannot be
1067 guaranteed to work properly due to a hardware bug. */
1068 if (!TARGET_EP)
1069 return FALSE;
1071 if (GET_CODE (op) != MEM)
1072 return FALSE;
1074 max_offset = ep_memory_offset (mode, unsigned_load);
1076 mask = GET_MODE_SIZE (mode) - 1;
1078 addr = XEXP (op, 0);
1079 if (GET_CODE (addr) == CONST)
1080 addr = XEXP (addr, 0);
1082 switch (GET_CODE (addr))
1084 default:
1085 break;
1087 case SYMBOL_REF:
1088 return SYMBOL_REF_TDA_P (addr);
1090 case REG:
1091 return REGNO (addr) == EP_REGNUM;
1093 case PLUS:
1094 op0 = XEXP (addr, 0);
1095 op1 = XEXP (addr, 1);
1096 if (GET_CODE (op1) == CONST_INT
1097 && INTVAL (op1) < max_offset
1098 && INTVAL (op1) >= 0
1099 && (INTVAL (op1) & mask) == 0)
1101 if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
1102 return TRUE;
1104 if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
1105 return TRUE;
1107 break;
1110 return FALSE;
1113 /* Substitute memory references involving a pointer, to use the ep pointer,
1114    taking care to save and restore the ep.  */
1116 static void
1117 substitute_ep_register (rtx_insn *first_insn,
1118 rtx_insn *last_insn,
1119 int uses,
1120 int regno,
1121 rtx * p_r1,
1122 rtx * p_ep)
1124 rtx reg = gen_rtx_REG (Pmode, regno);
1125 rtx_insn *insn;
1127 if (!*p_r1)
1129 df_set_regs_ever_live (1, true);
1130 *p_r1 = gen_rtx_REG (Pmode, 1);
1131 *p_ep = gen_rtx_REG (Pmode, 30);
1134 if (TARGET_DEBUG)
1135 fprintf (stderr, "\
1136 Saved %d bytes (%d uses of register %s) in function %s, starting at insn %d, ending at %d\n",
1137 2 * (uses - 3), uses, reg_names[regno],
1138 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
1139 INSN_UID (first_insn), INSN_UID (last_insn));
1141 if (NOTE_P (first_insn))
1142 first_insn = next_nonnote_insn (first_insn);
1144 last_insn = next_nonnote_insn (last_insn);
1145 for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
1147 if (NONJUMP_INSN_P (insn))
1149 rtx pattern = single_set (insn);
1151 /* Replace the memory references. */
1152 if (pattern)
1154 rtx *p_mem;
1155 /* Memory operands are signed by default. */
1156 int unsignedp = FALSE;
1158 if (GET_CODE (SET_DEST (pattern)) == MEM
1159 && GET_CODE (SET_SRC (pattern)) == MEM)
1160 p_mem = (rtx *)0;
1162 else if (GET_CODE (SET_DEST (pattern)) == MEM)
1163 p_mem = &SET_DEST (pattern);
1165 else if (GET_CODE (SET_SRC (pattern)) == MEM)
1166 p_mem = &SET_SRC (pattern);
1168 else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
1169 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1170 p_mem = &XEXP (SET_SRC (pattern), 0);
1172 else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
1173 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1175 p_mem = &XEXP (SET_SRC (pattern), 0);
1176 unsignedp = TRUE;
1178 else
1179 p_mem = (rtx *)0;
1181 if (p_mem)
1183 rtx addr = XEXP (*p_mem, 0);
1185 if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
1186 *p_mem = change_address (*p_mem, VOIDmode, *p_ep);
1188 else if (GET_CODE (addr) == PLUS
1189 && GET_CODE (XEXP (addr, 0)) == REG
1190 && REGNO (XEXP (addr, 0)) == (unsigned) regno
1191 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1192 && ((INTVAL (XEXP (addr, 1)))
1193 < ep_memory_offset (GET_MODE (*p_mem),
1194 unsignedp))
1195 && ((INTVAL (XEXP (addr, 1))) >= 0))
1196 *p_mem = change_address (*p_mem, VOIDmode,
1197 gen_rtx_PLUS (Pmode,
1198 *p_ep,
1199 XEXP (addr, 1)));
1205 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1206 insn = prev_nonnote_insn (first_insn);
1207 if (insn && NONJUMP_INSN_P (insn)
1208 && GET_CODE (PATTERN (insn)) == SET
1209 && SET_DEST (PATTERN (insn)) == *p_ep
1210 && SET_SRC (PATTERN (insn)) == *p_r1)
1211 delete_insn (insn);
1212 else
1213 emit_insn_before (gen_rtx_SET (*p_r1, *p_ep), first_insn);
1215 emit_insn_before (gen_rtx_SET (*p_ep, reg), first_insn);
1216 emit_insn_before (gen_rtx_SET (*p_ep, *p_r1), last_insn);
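/* Editor's note -- illustration only, not part of the original source:
   the three emit_insn_before calls above bracket the region roughly like

       mov ep,r1          -- save the old ep in r1
       mov <regno>,ep     -- point ep at the heavily used pointer
       ... rewritten sld/sst accesses ...
       mov r1,ep          -- restore ep after the last rewritten insn

   which is why r1 is marked as ever-live at the top of the function.  */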
1220 /* TARGET_MACHINE_DEPENDENT_REORG.  On the V850, we use it to implement
1221    the -mep mode: copy heavily used pointers into the ep register so that
1222    the implicit (short sld/sst) addressing forms can be used.  */
1224 static void
1225 v850_reorg (void)
1227 struct
1229 int uses;
1230 rtx_insn *first_insn;
1231 rtx_insn *last_insn;
1233 regs[FIRST_PSEUDO_REGISTER];
1235 int i;
1236 int use_ep = FALSE;
1237 rtx r1 = NULL_RTX;
1238 rtx ep = NULL_RTX;
1239 rtx_insn *insn;
1240 rtx pattern;
1242 /* If not ep mode, just return now. */
1243 if (!TARGET_EP)
1244 return;
1246 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1248 regs[i].uses = 0;
1249 regs[i].first_insn = NULL;
1250 regs[i].last_insn = NULL;
1253 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
1255 switch (GET_CODE (insn))
1257 /* End of basic block */
1258 default:
1259 if (!use_ep)
1261 int max_uses = -1;
1262 int max_regno = -1;
1264 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1266 if (max_uses < regs[i].uses)
1268 max_uses = regs[i].uses;
1269 max_regno = i;
1273 if (max_uses > 3)
1274 substitute_ep_register (regs[max_regno].first_insn,
1275 regs[max_regno].last_insn,
1276 max_uses, max_regno, &r1, &ep);
1279 use_ep = FALSE;
1280 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1282 regs[i].uses = 0;
1283 regs[i].first_insn = NULL;
1284 regs[i].last_insn = NULL;
1286 break;
1288 case NOTE:
1289 break;
1291 case INSN:
1292 pattern = single_set (insn);
1294 /* See if there are any memory references we can shorten. */
1295 if (pattern)
1297 rtx src = SET_SRC (pattern);
1298 rtx dest = SET_DEST (pattern);
1299 rtx mem;
1300 /* Memory operands are signed by default. */
1301 int unsignedp = FALSE;
1303 /* We might have (SUBREG (MEM)) here, so just get rid of the
1304 subregs to make this code simpler. */
1305 if (GET_CODE (dest) == SUBREG
1306 && (GET_CODE (SUBREG_REG (dest)) == MEM
1307 || GET_CODE (SUBREG_REG (dest)) == REG))
1308 alter_subreg (&dest, false);
1309 if (GET_CODE (src) == SUBREG
1310 && (GET_CODE (SUBREG_REG (src)) == MEM
1311 || GET_CODE (SUBREG_REG (src)) == REG))
1312 alter_subreg (&src, false);
1314 if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
1315 mem = NULL_RTX;
1317 else if (GET_CODE (dest) == MEM)
1318 mem = dest;
1320 else if (GET_CODE (src) == MEM)
1321 mem = src;
1323 else if (GET_CODE (src) == SIGN_EXTEND
1324 && GET_CODE (XEXP (src, 0)) == MEM)
1325 mem = XEXP (src, 0);
1327 else if (GET_CODE (src) == ZERO_EXTEND
1328 && GET_CODE (XEXP (src, 0)) == MEM)
1330 mem = XEXP (src, 0);
1331 unsignedp = TRUE;
1333 else
1334 mem = NULL_RTX;
1336 if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
1337 use_ep = TRUE;
1339 else if (!use_ep && mem
1340 && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
1342 rtx addr = XEXP (mem, 0);
1343 int regno = -1;
1344 int short_p;
1346 if (GET_CODE (addr) == REG)
1348 short_p = TRUE;
1349 regno = REGNO (addr);
1352 else if (GET_CODE (addr) == PLUS
1353 && GET_CODE (XEXP (addr, 0)) == REG
1354 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1355 && ((INTVAL (XEXP (addr, 1)))
1356 < ep_memory_offset (GET_MODE (mem), unsignedp))
1357 && ((INTVAL (XEXP (addr, 1))) >= 0))
1359 short_p = TRUE;
1360 regno = REGNO (XEXP (addr, 0));
1363 else
1364 short_p = FALSE;
1366 if (short_p)
1368 regs[regno].uses++;
1369 regs[regno].last_insn = insn;
1370 if (!regs[regno].first_insn)
1371 regs[regno].first_insn = insn;
1375 /* Loading up a register in the basic block zaps any savings
1376 for the register */
1377 if (GET_CODE (dest) == REG)
1379 int regno;
1380 int endregno;
1382 regno = REGNO (dest);
1383 endregno = END_REGNO (dest);
1385 if (!use_ep)
1387 /* See if we can use the pointer before this
1388 modification. */
1389 int max_uses = -1;
1390 int max_regno = -1;
1392 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1394 if (max_uses < regs[i].uses)
1396 max_uses = regs[i].uses;
1397 max_regno = i;
1401 if (max_uses > 3
1402 && max_regno >= regno
1403 && max_regno < endregno)
1405 substitute_ep_register (regs[max_regno].first_insn,
1406 regs[max_regno].last_insn,
1407 max_uses, max_regno, &r1,
1408 &ep);
1410 /* Since we made a substitution, zap all remembered
1411 registers. */
1412 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1414 regs[i].uses = 0;
1415 regs[i].first_insn = NULL;
1416 regs[i].last_insn = NULL;
1421 for (i = regno; i < endregno; i++)
1423 regs[i].uses = 0;
1424 regs[i].first_insn = NULL;
1425 regs[i].last_insn = NULL;
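/* Editor's note -- illustration only, not part of the original source:
   the "max_uses > 3" test above matches the byte accounting reported by
   substitute_ep_register: each rewritten access saves 2 bytes (a 4-byte
   ld/st becomes a 2-byte sld/sst) while the mov-to/from-ep wrapper costs
   about 6 bytes, so the rewrite only pays off from the fourth use on.  */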
1433 /* # of registers saved by the interrupt handler. */
1434 #define INTERRUPT_FIXED_NUM 5
1436 /* # of bytes for registers saved by the interrupt handler. */
1437 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1439 /* # of words saved for other registers. */
1440 #define INTERRUPT_ALL_SAVE_NUM \
1441 (30 - INTERRUPT_FIXED_NUM)
1443 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
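/* Editor's note -- illustration only, not part of the original source:
   with the values above, INTERRUPT_FIXED_SAVE_SIZE is 4 * 5 = 20 bytes
   and INTERRUPT_ALL_SAVE_SIZE is 4 * (30 - 5) = 100 bytes.
   expand_prologue and expand_epilogue subtract these amounts from the
   computed frame size because the interrupt save/restore sequences
   allocate that space themselves.  */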
1445 int
1446 compute_register_save_size (long * p_reg_saved)
1448 int size = 0;
1449 int i;
1450 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1451 int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
1452 long reg_saved = 0;
1454 /* Count space for the register saves. */
1455 if (interrupt_handler)
1457 for (i = 0; i <= 31; i++)
1458 switch (i)
1460 default:
1461 if (df_regs_ever_live_p (i) || call_p)
1463 size += 4;
1464 reg_saved |= 1L << i;
1466 break;
1468 /* We don't save/restore r0 or the stack pointer */
1469 case 0:
1470 case STACK_POINTER_REGNUM:
1471 break;
1473 /* For registers with fixed use, we save them, set them to the
1474 appropriate value, and then restore them.
1475    These registers are handled specially, so they are not included
1476    in the list of registers to save in the prologue.  */
1477 case 1: /* temp used to hold ep */
1478 case 4: /* gp */
1479 case 10: /* temp used to call interrupt save/restore */
1480 case 11: /* temp used to call interrupt save/restore (long call) */
1481 case EP_REGNUM: /* ep */
1482 size += 4;
1483 break;
1486 else
1488 /* Find the first register that needs to be saved. */
1489 for (i = 0; i <= 31; i++)
1490 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1491 || i == LINK_POINTER_REGNUM))
1492 break;
1494 /* If it is possible that an out-of-line helper function might be
1495 used to generate the prologue for the current function, then we
1496 need to cover the possibility that such a helper function will
1497 be used, despite the fact that there might be gaps in the list of
1498 registers that need to be saved. To detect this we note that the
1499 helper functions always push at least register r29 (provided
1500 that the function is not an interrupt handler). */
1502 if (TARGET_PROLOG_FUNCTION
1503 && (i == 2 || ((i >= 20) && (i < 30))))
1505 if (i == 2)
1507 size += 4;
1508 reg_saved |= 1L << i;
1510 i = 20;
1513 /* Helper functions save all registers between the starting
1514 register and the last register, regardless of whether they
1515 are actually used by the function or not. */
1516 for (; i <= 29; i++)
1518 size += 4;
1519 reg_saved |= 1L << i;
1522 if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
1524 size += 4;
1525 reg_saved |= 1L << LINK_POINTER_REGNUM;
1528 else
1530 for (; i <= 31; i++)
1531 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1532 || i == LINK_POINTER_REGNUM))
1534 size += 4;
1535 reg_saved |= 1L << i;
1540 if (p_reg_saved)
1541 *p_reg_saved = reg_saved;
1543 return size;
1546 /* Typical stack layout should look like this after the function's prologue:
1549                               --                       ^
1550                             |    | \                   |
1551                             |    |   arguments saved   | Increasing
1552                             |    |   on the stack      |  addresses
1553     PARENT   arg pointer -> |    | /
1554   -------------------------- ---- -------------------
1555                             |    | - space for argument split between regs & stack
1557     CHILD                   |    | \    <-- (return address here)
1558                             |    |   other call
1559                             |    |   saved registers
1560                             |    | /
1562         frame pointer ->    |    | \                   ___
1563                             |    |   local             |
1564                             |    |   variables         |f
1565                             |    | /                   |r
1566                               --                       |a
1567                             |    | \                   |m
1568                             |    |   outgoing          |e
1569                             |    |   arguments         |    | Decreasing
1570     (hard) frame pointer    |    | /                   |    |  addresses
1571        and stack pointer -> |    | /                  _|_   |
1572   -------------------------- ---- ------------------       V  */
1574 int
1575 compute_frame_size (int size, long * p_reg_saved)
1577 return (size
1578 + compute_register_save_size (p_reg_saved)
1579 + crtl->outgoing_args_size);
1582 static int
1583 use_prolog_function (int num_save, int frame_size)
1585 int alloc_stack = (4 * num_save);
1586 int unalloc_stack = frame_size - alloc_stack;
1587 int save_func_len, restore_func_len;
1588 int save_normal_len, restore_normal_len;
1590 if (! TARGET_DISABLE_CALLT)
1591 save_func_len = restore_func_len = 2;
1592 else
1593 save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;
1595 if (unalloc_stack)
1597 save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1598 restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1601 /* See if we would have used ep to save the stack. */
1602 if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
1603 save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
1604 else
1605 save_normal_len = restore_normal_len = 4 * num_save;
1607 save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
1608 restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;
1610 /* Don't bother checking if we don't actually save any space.
1611 This happens for instance if one register is saved and additional
1612 stack space is allocated. */
1613 return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
1616 static void
1617 increment_stack (signed int amount, bool in_prologue)
1619 rtx inc;
1621 if (amount == 0)
1622 return;
1624 inc = GEN_INT (amount);
1626 if (! CONST_OK_FOR_K (amount))
1628 rtx reg = gen_rtx_REG (Pmode, 12);
1630 inc = emit_move_insn (reg, inc);
1631 if (in_prologue)
1632 F (inc);
1633 inc = reg;
1636 inc = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, inc));
1637 if (in_prologue)
1638 F (inc);
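/* Editor's note -- illustration only, not part of the original source:
   a stack adjustment emitted via gen_addsi3 can only encode a signed
   16-bit immediate directly (CONST_OK_FOR_K), so a larger amount is first
   loaded into the scratch register r12 and then added, roughly:

       movhi hi(<amount>),r0,r12
       movea lo(<amount>),r12,r12
       add   r12,sp

   (<amount> stands for the frame adjustment; the exact constant-load
   sequence depends on the target variant).  */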
1641 void
1642 expand_prologue (void)
1644 unsigned int i;
1645 unsigned int size = get_frame_size ();
1646 unsigned int actual_fsize;
1647 unsigned int init_stack_alloc = 0;
1648 rtx save_regs[32];
1649 rtx save_all;
1650 unsigned int num_save;
1651 int code;
1652 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1653 long reg_saved = 0;
1655 actual_fsize = compute_frame_size (size, &reg_saved);
1657 if (flag_stack_usage_info)
1658 current_function_static_stack_size = actual_fsize;
1660 /* Save/setup global registers for interrupt functions right now. */
1661 if (interrupt_handler)
1663 if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
1664 emit_insn (gen_callt_save_interrupt ());
1665 else
1666 emit_insn (gen_save_interrupt ());
1668 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1670 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1671 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1673 /* Interrupt functions are not passed arguments, so no need to
1674 allocate space for split structure arguments. */
1675 gcc_assert (crtl->args.pretend_args_size == 0);
1678 /* Identify all of the saved registers. */
1679 num_save = 0;
1680 for (i = 1; i < 32; i++)
1682 if (((1L << i) & reg_saved) != 0)
1683 save_regs[num_save++] = gen_rtx_REG (Pmode, i);
1686 if (crtl->args.pretend_args_size)
1688 if (num_save == 0)
1690 increment_stack (- (actual_fsize + crtl->args.pretend_args_size), true);
1691 actual_fsize = 0;
1693 else
1694 increment_stack (- crtl->args.pretend_args_size, true);
1697 /* See if we have an insn that allocates stack space and saves the particular
1698 registers we want to. Note that the helpers won't
1699 allocate additional space for registers GCC saves to complete a
1700 "split" structure argument. */
1701 save_all = NULL_RTX;
1702 if (TARGET_PROLOG_FUNCTION
1703 && !crtl->args.pretend_args_size
1704 && num_save > 0)
1706 if (use_prolog_function (num_save, actual_fsize))
1708 int alloc_stack = 4 * num_save;
1709 int offset = 0;
1711 save_all = gen_rtx_PARALLEL
1712 (VOIDmode,
1713 rtvec_alloc (num_save + 1
1714 + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));
1716 XVECEXP (save_all, 0, 0)
1717 = gen_rtx_SET (stack_pointer_rtx,
1718 gen_rtx_PLUS (Pmode,
1719 stack_pointer_rtx,
1720 GEN_INT(-alloc_stack)));
1721 for (i = 0; i < num_save; i++)
1723 offset -= 4;
1724 XVECEXP (save_all, 0, i+1)
1725 = gen_rtx_SET (gen_rtx_MEM (Pmode,
1726 gen_rtx_PLUS (Pmode,
1727 stack_pointer_rtx,
1728 GEN_INT(offset))),
1729 save_regs[i]);
1732 if (TARGET_DISABLE_CALLT)
1734 XVECEXP (save_all, 0, num_save + 1)
1735 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));
1737 if (TARGET_LONG_CALLS)
1738 XVECEXP (save_all, 0, num_save + 2)
1739 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
1742 v850_all_frame_related (save_all);
1744 code = recog (save_all, NULL, NULL);
1745 if (code >= 0)
1747 rtx insn = emit_insn (save_all);
1748 INSN_CODE (insn) = code;
1749 actual_fsize -= alloc_stack;
1752 else
1753 save_all = NULL_RTX;
1757 /* If no prolog save function is available, store the registers the old
1758 fashioned way (one by one). */
1759 if (!save_all)
1761 /* Special case interrupt functions that save all registers for a call. */
1762 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1764 if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
1765 emit_insn (gen_callt_save_all_interrupt ());
1766 else
1767 emit_insn (gen_save_all_interrupt ());
1769 else
1771 int offset;
1772 /* If the stack is too big, allocate it in chunks so we can do the
1773    register saves.  We use the register save size so that the saves can
1774    still use the ep register.  */
1775 if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1776 init_stack_alloc = compute_register_save_size (NULL);
1777 else
1778 init_stack_alloc = actual_fsize;
1780 /* Save registers at the beginning of the stack frame. */
1781 offset = init_stack_alloc - 4;
1783 if (init_stack_alloc)
1784 increment_stack (- (signed) init_stack_alloc, true);
1786 /* Save the return pointer first. */
1787 if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
1789 F (emit_move_insn (gen_rtx_MEM (SImode,
1790 plus_constant (Pmode,
1791 stack_pointer_rtx,
1792 offset)),
1793 save_regs[--num_save]));
1794 offset -= 4;
1797 for (i = 0; i < num_save; i++)
1799 F (emit_move_insn (gen_rtx_MEM (SImode,
1800 plus_constant (Pmode,
1801 stack_pointer_rtx,
1802 offset)),
1803 save_regs[i]));
1804 offset -= 4;
1809 /* Allocate the rest of the stack that was not allocated above (either it is
1810 > 32K or we just called a function to save the registers and needed more
1811    stack).  */
1812 if (actual_fsize > init_stack_alloc)
1813 increment_stack (init_stack_alloc - actual_fsize, true);
1815 /* If we need a frame pointer, set it up now. */
1816 if (frame_pointer_needed)
1817 F (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
1821 void
1822 expand_epilogue (void)
1824 unsigned int i;
1825 unsigned int size = get_frame_size ();
1826 long reg_saved = 0;
1827 int actual_fsize = compute_frame_size (size, &reg_saved);
1828 rtx restore_regs[32];
1829 rtx restore_all;
1830 unsigned int num_restore;
1831 int code;
1832 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1834 /* Eliminate the initial stack stored by interrupt functions. */
1835 if (interrupt_handler)
1837 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1838 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1839 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1842 /* Cut off any dynamic stack created. */
1843 if (frame_pointer_needed)
1844 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1846 /* Identify all of the saved registers. */
1847 num_restore = 0;
1848 for (i = 1; i < 32; i++)
1850 if (((1L << i) & reg_saved) != 0)
1851 restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
1854 /* See if we have an insn that restores the particular registers we
1855 want to. */
1856 restore_all = NULL_RTX;
1858 if (TARGET_PROLOG_FUNCTION
1859 && num_restore > 0
1860 && !crtl->args.pretend_args_size
1861 && !interrupt_handler)
1863 int alloc_stack = (4 * num_restore);
1865 /* Don't bother checking if we don't actually save any space. */
1866 if (use_prolog_function (num_restore, actual_fsize))
1868 int offset;
1869 restore_all = gen_rtx_PARALLEL (VOIDmode,
1870 rtvec_alloc (num_restore + 2));
1871 XVECEXP (restore_all, 0, 0) = ret_rtx;
1872 XVECEXP (restore_all, 0, 1)
1873 = gen_rtx_SET (stack_pointer_rtx,
1874 gen_rtx_PLUS (Pmode,
1875 stack_pointer_rtx,
1876 GEN_INT (alloc_stack)));
1878 offset = alloc_stack - 4;
1879 for (i = 0; i < num_restore; i++)
1881 XVECEXP (restore_all, 0, i+2)
1882 = gen_rtx_SET (restore_regs[i],
1883 gen_rtx_MEM (Pmode,
1884 gen_rtx_PLUS (Pmode,
1885 stack_pointer_rtx,
1886 GEN_INT(offset))));
1887 offset -= 4;
1890 code = recog (restore_all, NULL, NULL);
1892 if (code >= 0)
1894 rtx insn;
1896 actual_fsize -= alloc_stack;
1897 increment_stack (actual_fsize, false);
1899 insn = emit_jump_insn (restore_all);
1900 INSN_CODE (insn) = code;
1902 else
1903 restore_all = NULL_RTX;
1907 /* If no epilogue save function is available, restore the registers the
1908 old fashioned way (one by one). */
1909 if (!restore_all)
1911 unsigned int init_stack_free;
1913 /* If the stack is large, we need to cut it down in 2 pieces. */
1914 if (interrupt_handler)
1915 init_stack_free = 0;
1916 else if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1917 init_stack_free = 4 * num_restore;
1918 else
1919 init_stack_free = (signed) actual_fsize;
1921 /* Deallocate the rest of the stack if it is > 32K. */
1922 if ((unsigned int) actual_fsize > init_stack_free)
1923 increment_stack (actual_fsize - init_stack_free, false);
1925 /* Special case interrupt functions that save all registers
1926 for a call. */
1927 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1929 if (! TARGET_DISABLE_CALLT)
1930 emit_insn (gen_callt_restore_all_interrupt ());
1931 else
1932 emit_insn (gen_restore_all_interrupt ());
1934 else
1936 /* Restore registers from the beginning of the stack frame. */
1937 int offset = init_stack_free - 4;
1939 /* Restore the return pointer first. */
1940 if (num_restore > 0
1941 && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
1943 emit_move_insn (restore_regs[--num_restore],
1944 gen_rtx_MEM (SImode,
1945 plus_constant (Pmode,
1946 stack_pointer_rtx,
1947 offset)));
1948 offset -= 4;
1951 for (i = 0; i < num_restore; i++)
1953 emit_move_insn (restore_regs[i],
1954 gen_rtx_MEM (SImode,
1955 plus_constant (Pmode,
1956 stack_pointer_rtx,
1957 offset)));
1959 emit_use (restore_regs[i]);
1960 offset -= 4;
1963 /* Cut back the remainder of the stack. */
1964 increment_stack (init_stack_free + crtl->args.pretend_args_size,
1965 false);
1968 /* And return or use reti for interrupt handlers. */
1969 if (interrupt_handler)
1971 if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
1972 emit_insn (gen_callt_return_interrupt ());
1973 else
1974 emit_jump_insn (gen_return_interrupt ());
1976 else if (actual_fsize)
1977 emit_jump_insn (gen_return_internal ());
1978 else
1979 emit_jump_insn (gen_return_simple ());
1982 v850_interrupt_cache_p = FALSE;
1983 v850_interrupt_p = FALSE;
1986 /* Update the condition code from the insn. */
1987 void
1988 notice_update_cc (rtx body, rtx_insn *insn)
1990 switch (get_attr_cc (insn))
1992 case CC_NONE:
1993 /* Insn does not affect CC at all. */
1994 break;
1996 case CC_NONE_0HIT:
1997 /* Insn does not change CC, but the 0'th operand has been changed. */
1998 if (cc_status.value1 != 0
1999 && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
2000 cc_status.value1 = 0;
2001 break;
2003 case CC_SET_ZN:
2004 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
2005    V and C are left in an unusable state.  */
2006 CC_STATUS_INIT;
2007 cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
2008 cc_status.value1 = recog_data.operand[0];
2009 break;
2011 case CC_SET_ZNV:
2012 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
2013 C is in an unusable state. */
2014 CC_STATUS_INIT;
2015 cc_status.flags |= CC_NO_CARRY;
2016 cc_status.value1 = recog_data.operand[0];
2017 break;
2019 case CC_COMPARE:
2020 /* The insn is a compare instruction. */
2021 CC_STATUS_INIT;
2022 cc_status.value1 = SET_SRC (body);
2023 break;
2025 case CC_CLOBBER:
2026 /* Insn doesn't leave CC in a usable state. */
2027 CC_STATUS_INIT;
2028 break;
2030 default:
2031 break;
2035 /* Retrieve the data area that has been chosen for the given decl. */
2037 v850_data_area
2038 v850_get_data_area (tree decl)
2040 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2041 return DATA_AREA_SDA;
2043 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2044 return DATA_AREA_TDA;
2046 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2047 return DATA_AREA_ZDA;
2049 return DATA_AREA_NORMAL;
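/* Editor's note -- illustration only, not part of the original source:
   the data area is chosen with the sda/tda/zda attributes in user code,
   for example

       int counter __attribute__ ((sda));

   which places "counter" in the gp-relative small data area; "tda" and
   "zda" likewise select the ep-relative tiny and r0-relative zero data
   areas handled by this file.  */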
2052 /* Store the indicated data area in the decl's attributes. */
2054 static void
2055 v850_set_data_area (tree decl, v850_data_area data_area)
2057 tree name;
2059 switch (data_area)
2061 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2062 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2063 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2064 default:
2065 return;
2068 DECL_ATTRIBUTES (decl) = tree_cons
2069 (name, NULL, DECL_ATTRIBUTES (decl));
2072 /* Handle an "interrupt" attribute; arguments as in
2073 struct attribute_spec.handler. */
2074 static tree
2075 v850_handle_interrupt_attribute (tree * node,
2076 tree name,
2077 tree args ATTRIBUTE_UNUSED,
2078 int flags ATTRIBUTE_UNUSED,
2079 bool * no_add_attrs)
2081 if (TREE_CODE (*node) != FUNCTION_DECL)
2083 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2084 name);
2085 *no_add_attrs = true;
2088 return NULL_TREE;
2091 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2092 struct attribute_spec.handler. */
2093 static tree
2094 v850_handle_data_area_attribute (tree* node,
2095 tree name,
2096 tree args ATTRIBUTE_UNUSED,
2097 int flags ATTRIBUTE_UNUSED,
2098 bool * no_add_attrs)
2100 v850_data_area data_area;
2101 v850_data_area area;
2102 tree decl = *node;
2104 /* Implement data area attribute. */
2105 if (is_attribute_p ("sda", name))
2106 data_area = DATA_AREA_SDA;
2107 else if (is_attribute_p ("tda", name))
2108 data_area = DATA_AREA_TDA;
2109 else if (is_attribute_p ("zda", name))
2110 data_area = DATA_AREA_ZDA;
2111 else
2112 gcc_unreachable ();
2114 switch (TREE_CODE (decl))
2116 case VAR_DECL:
2117 if (current_function_decl != NULL_TREE)
2119 error_at (DECL_SOURCE_LOCATION (decl),
2120 "data area attributes cannot be specified for "
2121 "local variables");
2122 *no_add_attrs = true;
2125 /* FALLTHRU */
2127 case FUNCTION_DECL:
2128 area = v850_get_data_area (decl);
2129 if (area != DATA_AREA_NORMAL && data_area != area)
2131 error ("data area of %q+D conflicts with previous declaration",
2132 decl);
2133 *no_add_attrs = true;
2135 break;
2137 default:
2138 break;
2141 return NULL_TREE;
2145 /* Return nonzero if FUNC is an interrupt function as specified
2146 by the "interrupt" attribute. */
2148 int
2149 v850_interrupt_function_p (tree func)
2151 tree a;
2152 int ret = 0;
2154 if (v850_interrupt_cache_p)
2155 return v850_interrupt_p;
2157 if (TREE_CODE (func) != FUNCTION_DECL)
2158 return 0;
2160 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2161 if (a != NULL_TREE)
2162 ret = 1;
2164 else
2166 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2167 ret = a != NULL_TREE;
2170 /* It's not safe to trust global variables until after function inlining has
2171 been done. */
2172 if (reload_completed | reload_in_progress)
2173 v850_interrupt_p = ret;
2175 return ret;
2179 static void
2180 v850_encode_data_area (tree decl, rtx symbol)
2182 int flags;
2184 /* Map explicit sections into the appropriate attribute */
2185 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2187 if (DECL_SECTION_NAME (decl))
2189 const char *name = DECL_SECTION_NAME (decl);
2191 if (streq (name, ".zdata") || streq (name, ".zbss"))
2192 v850_set_data_area (decl, DATA_AREA_ZDA);
2194 else if (streq (name, ".sdata") || streq (name, ".sbss"))
2195 v850_set_data_area (decl, DATA_AREA_SDA);
2197 else if (streq (name, ".tdata"))
2198 v850_set_data_area (decl, DATA_AREA_TDA);
2201 /* If no attribute, support -m{zda,sda,tda}=n */
2202 else
2204 int size = int_size_in_bytes (TREE_TYPE (decl));
2205 if (size <= 0)
2208 else if (size <= small_memory_max [(int) SMALL_MEMORY_TDA])
2209 v850_set_data_area (decl, DATA_AREA_TDA);
2211 else if (size <= small_memory_max [(int) SMALL_MEMORY_SDA])
2212 v850_set_data_area (decl, DATA_AREA_SDA);
2214 else if (size <= small_memory_max [(int) SMALL_MEMORY_ZDA])
2215 v850_set_data_area (decl, DATA_AREA_ZDA);
2218 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2219 return;
2222 flags = SYMBOL_REF_FLAGS (symbol);
2223 switch (v850_get_data_area (decl))
2225 case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
2226 case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
2227 case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
2228 default: gcc_unreachable ();
2230 SYMBOL_REF_FLAGS (symbol) = flags;
2233 static void
2234 v850_encode_section_info (tree decl, rtx rtl, int first)
2236 default_encode_section_info (decl, rtl, first);
2238 if (TREE_CODE (decl) == VAR_DECL
2239 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2240 v850_encode_data_area (decl, XEXP (rtl, 0));
2243 /* Construct a JR instruction to a routine that will perform the equivalent of
2244 the RTL passed in as an argument. This RTL is a function epilogue that
2245 pops registers off the stack and possibly releases some extra stack space
2246 as well. The code has already verified that the RTL matches these
2247 requirements. */
2249 char *
2250 construct_restore_jr (rtx op)
2252 int count = XVECLEN (op, 0);
2253 int stack_bytes;
2254 unsigned long int mask;
2255 unsigned long int first;
2256 unsigned long int last;
2257 int i;
2258 static char buff [100]; /* XXX */
2260 if (count <= 2)
2262 error ("bogus JR construction: %d", count);
2263 return NULL;
2266 /* Work out how many bytes to pop off the stack before retrieving
2267 registers. */
2268 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2269 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2270 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2272 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2274 /* Each pop will remove 4 bytes from the stack.... */
2275 stack_bytes -= (count - 2) * 4;
2277 /* Make sure that the amount we are popping is either 0 or 16 bytes.  */
2278 if (stack_bytes != 0)
2280 error ("bad amount of stack space removal: %d", stack_bytes);
2281 return NULL;
2284 /* Now compute the bit mask of registers to pop.  */
2285 mask = 0;
2286 for (i = 2; i < count; i++)
2288 rtx vector_element = XVECEXP (op, 0, i);
2290 gcc_assert (GET_CODE (vector_element) == SET);
2291 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2292 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2293 SImode));
2295 mask |= 1 << REGNO (SET_DEST (vector_element));
2298 /* Scan for the first register to pop. */
2299 for (first = 0; first < 32; first++)
2301 if (mask & (1 << first))
2302 break;
2305 gcc_assert (first < 32);
2307 /* Discover the last register to pop. */
2308 if (mask & (1 << LINK_POINTER_REGNUM))
2310 last = LINK_POINTER_REGNUM;
2312 else
2314 gcc_assert (!stack_bytes);
2315 gcc_assert (mask & (1 << 29));
2317 last = 29;
2320 /* Note, it is possible to have gaps in the register mask.
2321 We ignore this here, and generate a JR anyway. We will
2322 be popping more registers than is strictly necessary, but
2323 it does save code space. */
2325 if (TARGET_LONG_CALLS)
2327 char name[40];
2329 if (first == last)
2330 sprintf (name, "__return_%s", reg_names [first]);
2331 else
2332 sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);
2334 sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2335 name, name);
2337 else
2339 if (first == last)
2340 sprintf (buff, "jr __return_%s", reg_names [first]);
2341 else
2342 sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
2345 return buff;
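/* Editor's note -- illustration only, not part of the original source
   (register and stub names inferred from the sprintf formats above): an
   epilogue that pops r29 through r31 would be emitted as
   "jr __return_r29_r31", or, with -mlong-calls, as the
   movhi/movea/jmp r6 sequence shown above targeting the same
   __return_... stub.  */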
2349 /* Construct a JARL instruction to a routine that will perform the equivalent
2350 of the RTL passed as a parameter. This RTL is a function prologue that
2351 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2352 some stack space as well. The code has already verified that the RTL
2353 matches these requirements. */
2354 char *
2355 construct_save_jarl (rtx op)
2357 int count = XVECLEN (op, 0);
2358 int stack_bytes;
2359 unsigned long int mask;
2360 unsigned long int first;
2361 unsigned long int last;
2362 int i;
2363 static char buff [100]; /* XXX */
2365 if (count <= (TARGET_LONG_CALLS ? 3 : 2))
2367 error ("bogus JARL construction: %d", count);
2368 return NULL;
2371 /* Paranoia. */
2372 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2373 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2374 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
2375 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2377 /* Work out how many bytes to push onto the stack after storing the
2378 registers. */
2379 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2381 /* Each push will put 4 bytes onto the stack.... */
2382 stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;
2384 /* Make sure that the amount we are pushing is either 0 or 16 bytes. */
2385 if (stack_bytes != 0)
2387 error ("bad amount of stack space allocation: %d", stack_bytes);
2388 return NULL;
2391 /* Now compute the bit mask of registers to push. */
2392 mask = 0;
2393 for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
2395 rtx vector_element = XVECEXP (op, 0, i);
2397 gcc_assert (GET_CODE (vector_element) == SET);
2398 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2399 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2400 SImode));
2402 mask |= 1 << REGNO (SET_SRC (vector_element));
2405 /* Scan for the first register to push. */
2406 for (first = 0; first < 32; first++)
2408 if (mask & (1 << first))
2409 break;
2412 gcc_assert (first < 32);
2414 /* Discover the last register to push. */
2415 if (mask & (1 << LINK_POINTER_REGNUM))
2417 last = LINK_POINTER_REGNUM;
2419 else
2421 gcc_assert (!stack_bytes);
2422 gcc_assert (mask & (1 << 29));
2424 last = 29;
2427 /* Note, it is possible to have gaps in the register mask.
2428 We ignore this here, and generate a JARL anyway. We will
2429 be pushing more registers than is strictly necessary, but
2430 it does save code space. */
2432 if (TARGET_LONG_CALLS)
2434 char name[40];
2436 if (first == last)
2437 sprintf (name, "__save_%s", reg_names [first]);
2438 else
2439 sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);
2441 if (TARGET_V850E3V5_UP)
2442 sprintf (buff, "mov hilo(%s), r11\n\tjarl [r11], r10", name);
2443 else
2444 sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2445 name, name);
2447 else
2449 if (first == last)
2450 sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
2451 else
2452 sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
2453 reg_names [last]);
2456 return buff;
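/* For illustration: a prologue that saves r29 and r31 and allocates no
   extra stack should be rendered by the code above as roughly

       jarl __save_r29_r31, r10

   or, with TARGET_LONG_CALLS on a core below V850E3V5,

       movhi hi(__save_r29_r31), r0, r11
       movea lo(__save_r29_r31), r11, r11
       jarl .+4, r10
       add 4, r10
       jmp r11

   assuming the __save_* helper routines are supplied by the run-time
   support library.  */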
2459 /* A version of asm_output_aligned_bss() that copes with the special
2460 data areas of the v850. */
2461 void
2462 v850_output_aligned_bss (FILE * file,
2463 tree decl,
2464 const char * name,
2465 unsigned HOST_WIDE_INT size,
2466 int align)
2468 switch (v850_get_data_area (decl))
2470 case DATA_AREA_ZDA:
2471 switch_to_section (zbss_section);
2472 break;
2474 case DATA_AREA_SDA:
2475 switch_to_section (sbss_section);
2476 break;
2478 case DATA_AREA_TDA:
2479 switch_to_section (tdata_section);
2480 break;
2482 default:
2483 switch_to_section (bss_section);
2484 break;
2487 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2488 #ifdef ASM_DECLARE_OBJECT_NAME
2489 last_assemble_variable_decl = decl;
2490 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2491 #else
2492 /* The standard thing is just to output a label for the object. */
2493 ASM_OUTPUT_LABEL (file, name);
2494 #endif /* ASM_DECLARE_OBJECT_NAME */
2495 ASM_OUTPUT_SKIP (file, size ? size : 1);
2498 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2499 void
2500 v850_output_common (FILE * file,
2501 tree decl,
2502 const char * name,
2503 int size,
2504 int align)
2506 if (decl == NULL_TREE)
2508 fprintf (file, "%s", COMMON_ASM_OP);
2510 else
2512 switch (v850_get_data_area (decl))
2514 case DATA_AREA_ZDA:
2515 fprintf (file, "%s", ZCOMMON_ASM_OP);
2516 break;
2518 case DATA_AREA_SDA:
2519 fprintf (file, "%s", SCOMMON_ASM_OP);
2520 break;
2522 case DATA_AREA_TDA:
2523 fprintf (file, "%s", TCOMMON_ASM_OP);
2524 break;
2526 default:
2527 fprintf (file, "%s", COMMON_ASM_OP);
2528 break;
2532 assemble_name (file, name);
2533 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2536 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2537 void
2538 v850_output_local (FILE * file,
2539 tree decl,
2540 const char * name,
2541 int size,
2542 int align)
2544 fprintf (file, "%s", LOCAL_ASM_OP);
2545 assemble_name (file, name);
2546 fprintf (file, "\n");
2548 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2551 /* Add data area to the given declaration if a ghs data area pragma is
2552 currently in effect (#pragma ghs startXXX/endXXX). */
2553 static void
2554 v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
2556 if (data_area_stack
2557 && data_area_stack->data_area
2558 && current_function_decl == NULL_TREE
2559 && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
2560 && v850_get_data_area (decl) == DATA_AREA_NORMAL)
2561 v850_set_data_area (decl, data_area_stack->data_area);
2563 /* Initialize the default names of the v850 specific sections,
2564 if this has not been done before. */
2566 if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
2568 GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
2569 = ".sdata";
2571 GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
2572 = ".rosdata";
2574 GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
2575 = ".tdata";
2577 GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
2578 = ".zdata";
2580 GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
2581 = ".rozdata";
2584 if (current_function_decl == NULL_TREE
2585 && (TREE_CODE (decl) == VAR_DECL
2586 || TREE_CODE (decl) == CONST_DECL
2587 || TREE_CODE (decl) == FUNCTION_DECL)
2588 && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
2589 && !DECL_SECTION_NAME (decl))
2591 enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
2592 const char * chosen_section;
2594 if (TREE_CODE (decl) == FUNCTION_DECL)
2595 kind = GHS_SECTION_KIND_TEXT;
2596 else
2598 /* First choose a section kind based on the data area of the decl. */
2599 switch (v850_get_data_area (decl))
2601 default:
2602 gcc_unreachable ();
2604 case DATA_AREA_SDA:
2605 kind = ((TREE_READONLY (decl))
2606 ? GHS_SECTION_KIND_ROSDATA
2607 : GHS_SECTION_KIND_SDATA);
2608 break;
2610 case DATA_AREA_TDA:
2611 kind = GHS_SECTION_KIND_TDATA;
2612 break;
2614 case DATA_AREA_ZDA:
2615 kind = ((TREE_READONLY (decl))
2616 ? GHS_SECTION_KIND_ROZDATA
2617 : GHS_SECTION_KIND_ZDATA);
2618 break;
2620 case DATA_AREA_NORMAL: /* default data area */
2621 if (TREE_READONLY (decl))
2622 kind = GHS_SECTION_KIND_RODATA;
2623 else if (DECL_INITIAL (decl))
2624 kind = GHS_SECTION_KIND_DATA;
2625 else
2626 kind = GHS_SECTION_KIND_BSS;
2630 /* Now, if the section kind has been explicitly renamed,
2631 then attach a section attribute. */
2632 chosen_section = GHS_current_section_names [(int) kind];
2634 /* Otherwise, if this kind of section needs an explicit section
2635 attribute, then also attach one. */
2636 if (chosen_section == NULL)
2637 chosen_section = GHS_default_section_names [(int) kind];
2639 if (chosen_section)
2641 /* Only set the section name if specified by a pragma, because
2642 otherwise it will force those variables to get allocated storage
2643 in this module, rather than by the linker. */
2644 set_decl_section_name (decl, chosen_section);
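/* For illustration (a sketch, assuming the GHS pragma spelling used in
   the comment above): a declaration such as

       #pragma ghs startzda
       int counter;
       #pragma ghs endzda

   reaches this hook while the zda area is in effect, so "counter" picks
   up the zda data area attribute and is then routed to the .zdata
   family of sections by the data-area-aware section selection in this
   file.  */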
2649 /* Construct a DISPOSE instruction that is the equivalent of
2650 the given RTX. We have already verified that this should
2651 be possible. */
2653 char *
2654 construct_dispose_instruction (rtx op)
2656 int count = XVECLEN (op, 0);
2657 int stack_bytes;
2658 unsigned long int mask;
2659 int i;
2660 static char buff[ 100 ]; /* XXX */
2661 int use_callt = 0;
2663 if (count <= 2)
2665 error ("bogus DISPOSE construction: %d", count);
2666 return NULL;
2669 /* Work out how many bytes to pop off the
2670 stack before retrieving registers. */
2671 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2672 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2673 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2675 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2677 /* Each pop will remove 4 bytes from the stack.... */
2678 stack_bytes -= (count - 2) * 4;
2680 /* Make sure that the amount we are popping
2681 will fit into the DISPOSE instruction. */
2682 if (stack_bytes > 128)
2684 error ("too much stack space to dispose of: %d", stack_bytes);
2685 return NULL;
2688 /* Now compute the bit mask of registers to pop. */
2689 mask = 0;
2691 for (i = 2; i < count; i++)
2693 rtx vector_element = XVECEXP (op, 0, i);
2695 gcc_assert (GET_CODE (vector_element) == SET);
2696 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2697 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2698 SImode));
2700 if (REGNO (SET_DEST (vector_element)) == 2)
2701 use_callt = 1;
2702 else
2703 mask |= 1 << REGNO (SET_DEST (vector_element));
2706 if (! TARGET_DISABLE_CALLT
2707 && (use_callt || stack_bytes == 0))
2709 if (use_callt)
2711 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
2712 return buff;
2714 else
2716 for (i = 20; i < 32; i++)
2717 if (mask & (1 << i))
2718 break;
2720 if (i == 31)
2721 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2722 else
2723 sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2724 i, (mask & (1 << 31)) ? "31c" : "29");
2727 else
2729 static char regs [100]; /* XXX */
2730 int done_one;
2732 /* Generate the DISPOSE instruction. Note we could just issue the
2733 bit mask as a number as the assembler can cope with this, but for
2734 the sake of our readers we turn it into a textual description. */
2735 regs[0] = 0;
2736 done_one = 0;
2738 for (i = 20; i < 32; i++)
2740 if (mask & (1 << i))
2742 int first;
2744 if (done_one)
2745 strcat (regs, ", ");
2746 else
2747 done_one = 1;
2749 first = i;
2750 strcat (regs, reg_names[ first ]);
2752 for (i++; i < 32; i++)
2753 if ((mask & (1 << i)) == 0)
2754 break;
2756 if (i > first + 1)
2758 strcat (regs, " - ");
2759 strcat (regs, reg_names[ i - 1 ] );
2764 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2767 return buff;
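/* For example: an epilogue that restores r29 and r31 and releases a
   further 16 bytes of local stack is rendered by the code above as

       dispose 4 {r29, r31}, r31

   where the immediate counts 4-byte words of extra stack.  An epilogue
   that also restores r2, or that releases no extra stack, uses the
   "callt ctoff(__callt_return_...)" form instead when CALLT is
   available.  */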
2770 /* Construct a PREPARE instruction that is the equivalent of
2771 the given RTL. We have already verified that this should
2772 be possible. */
2774 char *
2775 construct_prepare_instruction (rtx op)
2777 int count;
2778 int stack_bytes;
2779 unsigned long int mask;
2780 int i;
2781 static char buff[ 100 ]; /* XXX */
2782 int use_callt = 0;
2784 if (XVECLEN (op, 0) <= 1)
2786 error ("bogus PREPARE construction: %d", XVECLEN (op, 0));
2787 return NULL;
2790 /* Work out how many bytes to push onto
2791 the stack after storing the registers. */
2792 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2793 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2794 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2796 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2799 /* Make sure that the amount we are pushing
2800 will fit into the PREPARE instruction. */
2801 if (stack_bytes < -128)
2803 error ("too much stack space to prepare: %d", stack_bytes);
2804 return NULL;
2807 /* Now compute the bit mask of registers to push. */
2808 count = 0;
2809 mask = 0;
2810 for (i = 1; i < XVECLEN (op, 0); i++)
2812 rtx vector_element = XVECEXP (op, 0, i);
2814 if (GET_CODE (vector_element) == CLOBBER)
2815 continue;
2817 gcc_assert (GET_CODE (vector_element) == SET);
2818 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2819 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2820 SImode));
2822 if (REGNO (SET_SRC (vector_element)) == 2)
2823 use_callt = 1;
2824 else
2825 mask |= 1 << REGNO (SET_SRC (vector_element));
2826 count++;
2829 stack_bytes += count * 4;
2831 if ((! TARGET_DISABLE_CALLT)
2832 && (use_callt || stack_bytes == 0))
2834 if (use_callt)
2836 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2837 return buff;
2840 for (i = 20; i < 32; i++)
2841 if (mask & (1 << i))
2842 break;
2844 if (i == 31)
2845 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2846 else
2847 sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2848 i, (mask & (1 << 31)) ? "31c" : "29");
2850 else
2852 static char regs [100]; /* XXX */
2853 int done_one;
2856 /* Generate the PREPARE instruction. Note we could just issue the
2857 bit mask as a number as the assembler can cope with this, but for
2858 the sake of our readers we turn it into a textual description. */
2859 regs[0] = 0;
2860 done_one = 0;
2862 for (i = 20; i < 32; i++)
2864 if (mask & (1 << i))
2866 int first;
2868 if (done_one)
2869 strcat (regs, ", ");
2870 else
2871 done_one = 1;
2873 first = i;
2874 strcat (regs, reg_names[ first ]);
2876 for (i++; i < 32; i++)
2877 if ((mask & (1 << i)) == 0)
2878 break;
2880 if (i > first + 1)
2882 strcat (regs, " - ");
2883 strcat (regs, reg_names[ i - 1 ] );
2888 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2891 return buff;
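/* For example: a prologue that saves r29 and r31 and allocates a
   further 16 bytes of local stack comes out of the code above as

       prepare {r29, r31}, 4

   where, as with DISPOSE, the immediate counts 4-byte words of stack
   beyond the register slots.  Prologues that also save r2, or that
   allocate no extra stack, use the "callt ctoff(__callt_save_...)"
   form instead when CALLT is available.  */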
2894 /* Return an RTX indicating where the return address to the
2895 calling function can be found. */
2897 rtx
2898 v850_return_addr (int count)
2900 if (count != 0)
2901 return const0_rtx;
2903 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2906 /* Implement TARGET_ASM_INIT_SECTIONS. */
2908 static void
2909 v850_asm_init_sections (void)
2911 rosdata_section
2912 = get_unnamed_section (0, output_section_asm_op,
2913 "\t.section .rosdata,\"a\"");
2915 rozdata_section
2916 = get_unnamed_section (0, output_section_asm_op,
2917 "\t.section .rozdata,\"a\"");
2919 tdata_section
2920 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2921 "\t.section .tdata,\"aw\"");
2923 zdata_section
2924 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2925 "\t.section .zdata,\"aw\"");
2927 zbss_section
2928 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
2929 output_section_asm_op,
2930 "\t.section .zbss,\"aw\"");
2933 static section *
2934 v850_select_section (tree exp,
2935 int reloc ATTRIBUTE_UNUSED,
2936 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2938 if (TREE_CODE (exp) == VAR_DECL)
2940 int is_const;
2941 if (!TREE_READONLY (exp)
2942 || TREE_SIDE_EFFECTS (exp)
2943 || !DECL_INITIAL (exp)
2944 || (DECL_INITIAL (exp) != error_mark_node
2945 && !TREE_CONSTANT (DECL_INITIAL (exp))))
2946 is_const = FALSE;
2947 else
2948 is_const = TRUE;
2950 switch (v850_get_data_area (exp))
2952 case DATA_AREA_ZDA:
2953 return is_const ? rozdata_section : zdata_section;
2955 case DATA_AREA_TDA:
2956 return tdata_section;
2958 case DATA_AREA_SDA:
2959 return is_const ? rosdata_section : sdata_section;
2961 default:
2962 return is_const ? readonly_data_section : data_section;
2965 return readonly_data_section;
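/* In short: constant zda and sda objects go to .rozdata and .rosdata,
   writable zda and sda objects to .zdata and .sdata, tda objects always
   to .tdata, and everything else falls back to the generic read-only or
   writable data sections.  */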
2968 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
2970 static bool
2971 v850_function_value_regno_p (const unsigned int regno)
2973 return (regno == RV_REGNUM);
2976 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2978 static bool
2979 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2981 /* Return values > 8 bytes in length in memory. */
2982 return int_size_in_bytes (type) > 8
2983 || TYPE_MODE (type) == BLKmode
2984 /* With the rh850 ABI return all aggregates in memory. */
2985 || ((! TARGET_GCC_ABI) && AGGREGATE_TYPE_P (type))
2989 /* Worker function for TARGET_FUNCTION_VALUE. */
2991 static rtx
2992 v850_function_value (const_tree valtype,
2993 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
2994 bool outgoing ATTRIBUTE_UNUSED)
2996 return gen_rtx_REG (TYPE_MODE (valtype), RV_REGNUM);
2999 /* Implement TARGET_LIBCALL_VALUE. */
3001 static rtx
3002 v850_libcall_value (machine_mode mode,
3003 const_rtx func ATTRIBUTE_UNUSED)
3005 return gen_rtx_REG (mode, RV_REGNUM);
3009 /* Worker function for TARGET_CAN_ELIMINATE. */
3011 static bool
3012 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
3014 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
3017 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
3019 If TARGET_APP_REGS is not enabled then r2 and r5 stay in the
3020 pool of fixed registers; otherwise release them. See PR 14505. */
3022 static void
3023 v850_conditional_register_usage (void)
3025 if (TARGET_APP_REGS)
3027 fixed_regs[2] = 0; call_used_regs[2] = 0;
3028 fixed_regs[5] = 0; call_used_regs[5] = 1;
3032 /* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE. */
3034 static void
3035 v850_asm_trampoline_template (FILE *f)
3037 fprintf (f, "\tjarl .+4,r12\n");
3038 fprintf (f, "\tld.w 12[r12],r20\n");
3039 fprintf (f, "\tld.w 16[r12],r12\n");
3040 fprintf (f, "\tjmp [r12]\n");
3041 fprintf (f, "\tnop\n");
3042 fprintf (f, "\t.long 0\n");
3043 fprintf (f, "\t.long 0\n");
3046 /* Worker function for TARGET_TRAMPOLINE_INIT. */
3048 static void
3049 v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
3051 rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);
3053 emit_block_move (m_tramp, assemble_trampoline_template (),
3054 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3056 mem = adjust_address (m_tramp, SImode, 16);
3057 emit_move_insn (mem, chain_value);
3058 mem = adjust_address (m_tramp, SImode, 20);
3059 emit_move_insn (mem, fnaddr);
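/* Note on the layout assumed above: the template emitted by
   v850_asm_trampoline_template fills the first 16 bytes of the
   trampoline, and the two ".long 0" slots that follow it are rewritten
   here, the static chain value going to offset 16 and the target
   function's address to offset 20.  That matches the 12[r12] and
   16[r12] displacements in the template, since after the initial
   "jarl .+4, r12" the register r12 holds the address of the following
   instruction (offset 4).  */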
3062 static int
3063 v850_issue_rate (void)
3065 return (TARGET_V850E2_UP ? 2 : 1);
3068 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3070 static bool
3071 v850_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
3073 return (GET_CODE (x) == CONST_DOUBLE
3074 || !(GET_CODE (x) == CONST
3075 && GET_CODE (XEXP (x, 0)) == PLUS
3076 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
3077 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3078 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x, 0), 1)))));
3081 /* Helper function for `v850_legitimate_address_p'. */
3083 static bool
3084 v850_reg_ok_for_base_p (const_rtx reg, bool strict_p)
3086 if (strict_p)
3088 return REGNO_OK_FOR_BASE_P (REGNO (reg));
3089 else
3090 return true;
3094 /* Accept either REG or SUBREG where a register is valid. */
3096 static bool
3097 v850_rtx_ok_for_base_p (const_rtx x, bool strict_p)
3099 return ((REG_P (x) && v850_reg_ok_for_base_p (x, strict_p))
3100 || (SUBREG_P (x) && REG_P (SUBREG_REG (x))
3101 && v850_reg_ok_for_base_p (SUBREG_REG (x), strict_p)));
3104 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
3106 static bool
3107 v850_legitimate_address_p (machine_mode mode, rtx x, bool strict_p,
3108 addr_space_t as ATTRIBUTE_UNUSED)
3110 gcc_assert (ADDR_SPACE_GENERIC_P (as));
3112 if (v850_rtx_ok_for_base_p (x, strict_p))
3113 return true;
3114 if (CONSTANT_ADDRESS_P (x)
3115 && (mode == QImode || INTVAL (x) % 2 == 0)
3116 && (GET_MODE_SIZE (mode) <= 4 || INTVAL (x) % 4 == 0))
3117 return true;
3118 if (GET_CODE (x) == LO_SUM
3119 && REG_P (XEXP (x, 0))
3120 && v850_reg_ok_for_base_p (XEXP (x, 0), strict_p)
3121 && CONSTANT_P (XEXP (x, 1))
3122 && (!CONST_INT_P (XEXP (x, 1))
3123 || ((mode == QImode || INTVAL (XEXP (x, 1)) % 2 == 0)
3124 && constraint_satisfied_p (XEXP (x, 1), CONSTRAINT_K)))
3125 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (word_mode))
3126 return true;
3127 if (special_symbolref_operand (x, mode)
3128 && (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (word_mode)))
3129 return true;
3130 if (GET_CODE (x) == PLUS
3131 && v850_rtx_ok_for_base_p (XEXP (x, 0), strict_p)
3132 && constraint_satisfied_p (XEXP (x,1), CONSTRAINT_K)
3133 && ((mode == QImode || INTVAL (XEXP (x, 1)) % 2 == 0)
3134 && CONST_OK_FOR_K (INTVAL (XEXP (x, 1))
3135 + (GET_MODE_NUNITS (mode) * UNITS_PER_WORD))))
3136 return true;
3138 return false;
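/* Examples (a sketch of the rules above): a SImode access through plain
   "r6", through "r6 + 8" (an even displacement that still satisfies
   constraint K once the access size is added), or through a lo_sum of a
   base register and a symbol is accepted, while "r6 + 3" is rejected
   for anything wider than QImode because of the even-displacement
   requirement.  */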
3141 static int
3142 v850_memory_move_cost (machine_mode mode,
3143 reg_class_t reg_class ATTRIBUTE_UNUSED,
3144 bool in)
3146 switch (GET_MODE_SIZE (mode))
3148 case 0:
3149 return in ? 24 : 8;
3150 case 1:
3151 case 2:
3152 case 3:
3153 case 4:
3154 return in ? 6 : 2;
3155 default:
3156 return (GET_MODE_SIZE (mode) / 2) * (in ? 3 : 1);
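/* For example: a 4-byte SImode value costs 6 to load and 2 to store
   under the table above, while an 8-byte DImode value hits the default
   case and costs (8 / 2) * 3 = 12 to load and (8 / 2) * 1 = 4 to
   store.  */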
3160 int
3161 v850_adjust_insn_length (rtx_insn *insn, int length)
3163 if (TARGET_V850E3V5_UP)
3165 if (CALL_P (insn))
3167 if (TARGET_LONG_CALLS)
3169 /* call_internal_long, call_value_internal_long. */
3170 if (length == 8)
3171 length = 4;
3172 if (length == 16)
3173 length = 10;
3175 else
3177 /* call_internal_short, call_value_internal_short. */
3178 if (length == 8)
3179 length = 4;
3183 return length;
3186 /* V850 specific attributes. */
3188 static const struct attribute_spec v850_attribute_table[] =
3190 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
3191 affects_type_identity, exclusions } */
3192 { "interrupt_handler", 0, 0, true, false, false,
3193 v850_handle_interrupt_attribute, false, NULL },
3194 { "interrupt", 0, 0, true, false, false,
3195 v850_handle_interrupt_attribute, false, NULL },
3196 { "sda", 0, 0, true, false, false,
3197 v850_handle_data_area_attribute, false, NULL },
3198 { "tda", 0, 0, true, false, false,
3199 v850_handle_data_area_attribute, false, NULL },
3200 { "zda", 0, 0, true, false, false,
3201 v850_handle_data_area_attribute, false, NULL },
3202 { NULL, 0, 0, false, false, false, NULL, false, NULL }
3205 static void
3206 v850_option_override (void)
3208 if (flag_exceptions || flag_non_call_exceptions)
3209 flag_omit_frame_pointer = 0;
3211 /* The RH850 ABI does not (currently) support the use of the CALLT instruction. */
3212 if (! TARGET_GCC_ABI)
3213 target_flags |= MASK_DISABLE_CALLT;
3216 const char *
3217 v850_gen_movdi (rtx * operands)
3219 if (REG_P (operands[0]))
3221 if (REG_P (operands[1]))
3223 if (REGNO (operands[0]) == (REGNO (operands[1]) - 1))
3224 return "mov %1, %0; mov %R1, %R0";
3226 return "mov %R1, %R0; mov %1, %0";
3229 if (MEM_P (operands[1]))
3231 if (REGNO (operands[0]) & 1)
3232 /* Use two load word instructions to synthesise a load double. */
3233 return "ld.w %1, %0 ; ld.w %R1, %R0" ;
3235 return "ld.dw %1, %0";
3238 return "mov %1, %0; mov %R1, %R0";
3241 gcc_assert (REG_P (operands[1]));
3243 if (REGNO (operands[1]) & 1)
3244 /* Use two store word instructions to synthesise a store double. */
3245 return "st.w %1, %0 ; st.w %R1, %R0 ";
3247 return "st.dw %1, %0";
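/* For example: a register-to-register DImode move expands to two
   single-word "mov" instructions covering the low and high words,
   e.g. "mov r10, r6" and "mov r11, r7" when copying the pair r11:r10
   into r7:r6, emitted in an order chosen so that an overlapping source
   is not clobbered ("%R" selects the second register of the pair, as
   the templates above rely on).  */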
3250 /* Implement TARGET_HARD_REGNO_MODE_OK. */
3252 static bool
3253 v850_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
3255 return GET_MODE_SIZE (mode) <= 4 || ((regno & 1) == 0 && regno != 0);
3258 /* Implement TARGET_MODES_TIEABLE_P. */
3260 static bool
3261 v850_modes_tieable_p (machine_mode mode1, machine_mode mode2)
3263 return (mode1 == mode2
3264 || (GET_MODE_SIZE (mode1) <= 4 && GET_MODE_SIZE (mode2) <= 4));
3267 /* Initialize the GCC target structure. */
3269 #undef TARGET_OPTION_OVERRIDE
3270 #define TARGET_OPTION_OVERRIDE v850_option_override
3272 #undef TARGET_MEMORY_MOVE_COST
3273 #define TARGET_MEMORY_MOVE_COST v850_memory_move_cost
3275 #undef TARGET_ASM_ALIGNED_HI_OP
3276 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
3278 #undef TARGET_PRINT_OPERAND
3279 #define TARGET_PRINT_OPERAND v850_print_operand
3280 #undef TARGET_PRINT_OPERAND_ADDRESS
3281 #define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
3282 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
3283 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p
3285 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
3286 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra
3288 #undef TARGET_ATTRIBUTE_TABLE
3289 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
3291 #undef TARGET_INSERT_ATTRIBUTES
3292 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
3294 #undef TARGET_ASM_SELECT_SECTION
3295 #define TARGET_ASM_SELECT_SECTION v850_select_section
3297 /* The assembler supports switchable .bss sections, but
3298 v850_select_section doesn't yet make use of them. */
3299 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
3300 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
3302 #undef TARGET_ENCODE_SECTION_INFO
3303 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
3305 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
3306 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
3308 #undef TARGET_RTX_COSTS
3309 #define TARGET_RTX_COSTS v850_rtx_costs
3311 #undef TARGET_ADDRESS_COST
3312 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
3314 #undef TARGET_MACHINE_DEPENDENT_REORG
3315 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
3317 #undef TARGET_SCHED_ISSUE_RATE
3318 #define TARGET_SCHED_ISSUE_RATE v850_issue_rate
3320 #undef TARGET_FUNCTION_VALUE_REGNO_P
3321 #define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
3322 #undef TARGET_FUNCTION_VALUE
3323 #define TARGET_FUNCTION_VALUE v850_function_value
3324 #undef TARGET_LIBCALL_VALUE
3325 #define TARGET_LIBCALL_VALUE v850_libcall_value
3327 #undef TARGET_PROMOTE_PROTOTYPES
3328 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3330 #undef TARGET_RETURN_IN_MEMORY
3331 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
3333 #undef TARGET_PASS_BY_REFERENCE
3334 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
3336 #undef TARGET_CALLEE_COPIES
3337 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
3339 #undef TARGET_ARG_PARTIAL_BYTES
3340 #define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes
3342 #undef TARGET_FUNCTION_ARG
3343 #define TARGET_FUNCTION_ARG v850_function_arg
3345 #undef TARGET_FUNCTION_ARG_ADVANCE
3346 #define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance
3348 #undef TARGET_CAN_ELIMINATE
3349 #define TARGET_CAN_ELIMINATE v850_can_eliminate
3351 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3352 #define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage
3354 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3355 #define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
3356 #undef TARGET_TRAMPOLINE_INIT
3357 #define TARGET_TRAMPOLINE_INIT v850_trampoline_init
3359 #undef TARGET_LEGITIMATE_CONSTANT_P
3360 #define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p
3362 #undef TARGET_LRA_P
3363 #define TARGET_LRA_P hook_bool_void_false
3365 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
3366 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P v850_legitimate_address_p
3368 #undef TARGET_CAN_USE_DOLOOP_P
3369 #define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost
3371 #undef TARGET_HARD_REGNO_MODE_OK
3372 #define TARGET_HARD_REGNO_MODE_OK v850_hard_regno_mode_ok
3374 #undef TARGET_MODES_TIEABLE_P
3375 #define TARGET_MODES_TIEABLE_P v850_modes_tieable_p
3377 struct gcc_target targetm = TARGET_INITIALIZER;
3379 #include "gt-v850.h"