1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996-2015 Free Software Foundation, Inc.
3 Contributed by Jeff Law (law@cygnus.com).
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "cfghooks.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "df.h"
29 #include "alias.h"
30 #include "stringpool.h"
31 #include "stor-layout.h"
32 #include "varasm.h"
33 #include "calls.h"
34 #include "regs.h"
35 #include "insn-config.h"
36 #include "conditions.h"
37 #include "output.h"
38 #include "insn-attr.h"
39 #include "flags.h"
40 #include "recog.h"
41 #include "expmed.h"
42 #include "dojump.h"
43 #include "explow.h"
44 #include "emit-rtl.h"
45 #include "stmt.h"
46 #include "expr.h"
47 #include "diagnostic-core.h"
48 #include "tm_p.h"
49 #include "target.h"
50 #include "cfgrtl.h"
51 #include "cfganal.h"
52 #include "lcm.h"
53 #include "cfgbuild.h"
54 #include "cfgcleanup.h"
55 #include "opts.h"
56 #include "builtins.h"
58 /* This file should be included last. */
59 #include "target-def.h"
61 #ifndef streq
62 #define streq(a,b) (strcmp (a, b) == 0)
63 #endif
65 static void v850_print_operand_address (FILE *, rtx);
67 /* Names of the various data areas used on the v850. */
68 const char * GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
69 const char * GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
71 /* Track the current data area set by the data area pragma (which
72 can be nested). Tested by check_default_data_area. */
73 data_area_stack_element * data_area_stack = NULL;
75 /* True if we don't need to check any more if the current
76 function is an interrupt handler. */
77 static int v850_interrupt_cache_p = FALSE;
79 rtx v850_compare_op0, v850_compare_op1;
81 /* Whether current function is an interrupt handler. */
82 static int v850_interrupt_p = FALSE;
84 static GTY(()) section * rosdata_section;
85 static GTY(()) section * rozdata_section;
86 static GTY(()) section * tdata_section;
87 static GTY(()) section * zdata_section;
88 static GTY(()) section * zbss_section;
90 /* We use this to wrap all emitted insns in the prologue. */
91 static rtx
92 F (rtx x)
94 if (GET_CODE (x) != CLOBBER)
95 RTX_FRAME_RELATED_P (x) = 1;
96 return x;
99 /* Mark all the subexpressions of the PARALLEL rtx PAR as
100 frame-related. Return PAR.
102 dwarf2out.c:dwarf2out_frame_debug_expr ignores sub-expressions of a
103 PARALLEL rtx other than the first if they do not have the
104 FRAME_RELATED flag set on them. */
106 static rtx
107 v850_all_frame_related (rtx par)
109 int len = XVECLEN (par, 0);
110 int i;
112 gcc_assert (GET_CODE (par) == PARALLEL);
113 for (i = 0; i < len; i++)
114 F (XVECEXP (par, 0, i));
116 return par;
119 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
120 Specify whether to pass the argument by reference. */
122 static bool
123 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
124 machine_mode mode, const_tree type,
125 bool named ATTRIBUTE_UNUSED)
127 unsigned HOST_WIDE_INT size;
129 if (!TARGET_GCC_ABI)
130 return 0;
132 if (type)
133 size = int_size_in_bytes (type);
134 else
135 size = GET_MODE_SIZE (mode);
137 return size > 8;
140 /* Return an RTX to represent where an argument with mode MODE
141 and type TYPE will be passed to a function. If the result
142 is NULL_RTX, the argument will be pushed. */
144 static rtx
145 v850_function_arg (cumulative_args_t cum_v, machine_mode mode,
146 const_tree type, bool named)
148 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
149 rtx result = NULL_RTX;
150 int size, align;
152 if (!named)
153 return NULL_RTX;
155 if (mode == BLKmode)
156 size = int_size_in_bytes (type);
157 else
158 size = GET_MODE_SIZE (mode);
160 size = (size + UNITS_PER_WORD -1) & ~(UNITS_PER_WORD -1);
162 if (size < 1)
164 /* Once we have stopped using argument registers, do not start up again. */
165 cum->nbytes = 4 * UNITS_PER_WORD;
166 return NULL_RTX;
169 if (!TARGET_GCC_ABI)
170 align = UNITS_PER_WORD;
171 else if (size <= UNITS_PER_WORD && type)
172 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
173 else
174 align = size;
176 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
178 if (cum->nbytes > 4 * UNITS_PER_WORD)
179 return NULL_RTX;
181 if (type == NULL_TREE
182 && cum->nbytes + size > 4 * UNITS_PER_WORD)
183 return NULL_RTX;
185 switch (cum->nbytes / UNITS_PER_WORD)
187 case 0:
188 result = gen_rtx_REG (mode, 6);
189 break;
190 case 1:
191 result = gen_rtx_REG (mode, 7);
192 break;
193 case 2:
194 result = gen_rtx_REG (mode, 8);
195 break;
196 case 3:
197 result = gen_rtx_REG (mode, 9);
198 break;
199 default:
200 result = NULL_RTX;
203 return result;
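/* As a concrete sketch of the mapping above (assuming the standard GCC ABI
   for this port): the first four argument words are passed in r6..r9, so for

       int f (int a, int b, long long c);

   A arrives in r6, B in r7, and C occupies the r8/r9 pair.  Any further
   named arguments no longer fit in 4 * UNITS_PER_WORD of register space and
   are pushed on the stack (NULL_RTX above).  Type alignment can also skip a
   register, as computed from TYPE_ALIGN.  */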
206 /* Return the number of bytes which must be put into registers
207 for values which are passed partly in registers and partly in memory. */
208 static int
209 v850_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
210 tree type, bool named)
212 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
213 int size, align;
215 if (!named)
216 return 0;
218 if (mode == BLKmode)
219 size = int_size_in_bytes (type);
220 else
221 size = GET_MODE_SIZE (mode);
223 if (size < 1)
224 size = 1;
226 if (!TARGET_GCC_ABI)
227 align = UNITS_PER_WORD;
228 else if (type)
229 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
230 else
231 align = size;
233 cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);
235 if (cum->nbytes > 4 * UNITS_PER_WORD)
236 return 0;
238 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
239 return 0;
241 if (type == NULL_TREE
242 && cum->nbytes + size > 4 * UNITS_PER_WORD)
243 return 0;
245 return 4 * UNITS_PER_WORD - cum->nbytes;
248 /* Update the data in CUM to advance over an argument
249 of mode MODE and data type TYPE.
250 (TYPE is null for libcalls where that information may not be available.) */
252 static void
253 v850_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
254 const_tree type, bool named ATTRIBUTE_UNUSED)
256 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
258 if (!TARGET_GCC_ABI)
259 cum->nbytes += (((mode != BLKmode
260 ? GET_MODE_SIZE (mode)
261 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1)
262 & -UNITS_PER_WORD);
263 else
264 cum->nbytes += (((type && int_size_in_bytes (type) > 8
265 ? GET_MODE_SIZE (Pmode)
266 : (mode != BLKmode
267 ? GET_MODE_SIZE (mode)
268 : int_size_in_bytes (type))) + UNITS_PER_WORD - 1)
269 & -UNITS_PER_WORD);
272 /* Return the high and low words of a CONST_DOUBLE */
274 static void
275 const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
277 if (GET_CODE (x) == CONST_DOUBLE)
279 long t[2];
280 REAL_VALUE_TYPE rv;
282 switch (GET_MODE (x))
284 case DFmode:
285 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
286 REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
287 *p_high = t[1]; /* since v850 is little endian */
288 *p_low = t[0]; /* high is second word */
289 return;
291 case SFmode:
292 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
293 REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
294 *p_low = 0;
295 return;
297 case VOIDmode:
298 case DImode:
299 *p_high = CONST_DOUBLE_HIGH (x);
300 *p_low = CONST_DOUBLE_LOW (x);
301 return;
303 default:
304 break;
308 fatal_insn ("const_double_split got a bad insn:", x);
312 /* Return the cost of the rtx R with code CODE. */
314 static int
315 const_costs_int (HOST_WIDE_INT value, int zero_cost)
317 if (CONST_OK_FOR_I (value))
318 return zero_cost;
319 else if (CONST_OK_FOR_J (value))
320 return 1;
321 else if (CONST_OK_FOR_K (value))
322 return 2;
323 else
324 return 4;
327 static int
328 const_costs (rtx r, enum rtx_code c)
330 HOST_WIDE_INT high, low;
332 switch (c)
334 case CONST_INT:
335 return const_costs_int (INTVAL (r), 0);
337 case CONST_DOUBLE:
338 const_double_split (r, &high, &low);
339 if (GET_MODE (r) == SFmode)
340 return const_costs_int (high, 1);
341 else
342 return const_costs_int (high, 1) + const_costs_int (low, 1);
344 case SYMBOL_REF:
345 case LABEL_REF:
346 case CONST:
347 return 2;
349 case HIGH:
350 return 1;
352 default:
353 return 4;
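/* Rough examples of the cost ladder used above (the constraint letters are
   the ones defined for this port): a zero-like constant (CONST_OK_FOR_I)
   gets the caller-supplied zero cost, a signed 5-bit immediate
   (CONST_OK_FOR_J) costs 1, a signed 16-bit immediate (CONST_OK_FOR_K)
   costs 2, and anything wider costs 4 because it needs a movhi/movea pair
   (or a 32-bit mov on V850E) to materialise.  */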
357 static bool
358 v850_rtx_costs (rtx x, machine_mode mode, int outer_code,
359 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
361 enum rtx_code code = GET_CODE (x);
363 switch (code)
365 case CONST_INT:
366 case CONST_DOUBLE:
367 case CONST:
368 case SYMBOL_REF:
369 case LABEL_REF:
370 *total = COSTS_N_INSNS (const_costs (x, code));
371 return true;
373 case MOD:
374 case DIV:
375 case UMOD:
376 case UDIV:
377 if (TARGET_V850E && !speed)
378 *total = 6;
379 else
380 *total = 60;
381 return true;
383 case MULT:
384 if (TARGET_V850E
385 && (mode == SImode || mode == HImode || mode == QImode))
387 if (GET_CODE (XEXP (x, 1)) == REG)
388 *total = 4;
389 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
391 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
392 *total = 6;
393 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
394 *total = 10;
397 else
398 *total = 20;
399 return true;
401 case ZERO_EXTRACT:
402 if (outer_code == COMPARE)
403 *total = 0;
404 return false;
406 default:
407 return false;
411 /* Print operand X using operand code CODE to assembly language output file
412 FILE. */
414 static void
415 v850_print_operand (FILE * file, rtx x, int code)
417 HOST_WIDE_INT high, low;
419 switch (code)
421 case 'c':
422 /* We use 'c' operands with symbols for .vtinherit. */
423 if (GET_CODE (x) == SYMBOL_REF)
425 output_addr_const(file, x);
426 break;
428 /* Fall through. */
429 case 'b':
430 case 'B':
431 case 'C':
432 switch ((code == 'B' || code == 'C')
433 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
435 case NE:
436 if (code == 'c' || code == 'C')
437 fprintf (file, "nz");
438 else
439 fprintf (file, "ne");
440 break;
441 case EQ:
442 if (code == 'c' || code == 'C')
443 fprintf (file, "z");
444 else
445 fprintf (file, "e");
446 break;
447 case GE:
448 fprintf (file, "ge");
449 break;
450 case GT:
451 fprintf (file, "gt");
452 break;
453 case LE:
454 fprintf (file, "le");
455 break;
456 case LT:
457 fprintf (file, "lt");
458 break;
459 case GEU:
460 fprintf (file, "nl");
461 break;
462 case GTU:
463 fprintf (file, "h");
464 break;
465 case LEU:
466 fprintf (file, "nh");
467 break;
468 case LTU:
469 fprintf (file, "l");
470 break;
471 default:
472 gcc_unreachable ();
474 break;
475 case 'F': /* High word of CONST_DOUBLE. */
476 switch (GET_CODE (x))
478 case CONST_INT:
479 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
480 break;
482 case CONST_DOUBLE:
483 const_double_split (x, &high, &low);
484 fprintf (file, "%ld", (long) high);
485 break;
487 default:
488 gcc_unreachable ();
490 break;
491 case 'G': /* Low word of CONST_DOUBLE. */
492 switch (GET_CODE (x))
494 case CONST_INT:
495 fprintf (file, "%ld", (long) INTVAL (x));
496 break;
498 case CONST_DOUBLE:
499 const_double_split (x, &high, &low);
500 fprintf (file, "%ld", (long) low);
501 break;
503 default:
504 gcc_unreachable ();
506 break;
507 case 'L':
508 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
509 break;
510 case 'M':
511 fprintf (file, "%d", exact_log2 (INTVAL (x)));
512 break;
513 case 'O':
514 gcc_assert (special_symbolref_operand (x, VOIDmode));
516 if (GET_CODE (x) == CONST)
517 x = XEXP (XEXP (x, 0), 0);
518 else
519 gcc_assert (GET_CODE (x) == SYMBOL_REF);
521 if (SYMBOL_REF_ZDA_P (x))
522 fprintf (file, "zdaoff");
523 else if (SYMBOL_REF_SDA_P (x))
524 fprintf (file, "sdaoff");
525 else if (SYMBOL_REF_TDA_P (x))
526 fprintf (file, "tdaoff");
527 else
528 gcc_unreachable ();
529 break;
530 case 'P':
531 gcc_assert (special_symbolref_operand (x, VOIDmode));
532 output_addr_const (file, x);
533 break;
534 case 'Q':
535 gcc_assert (special_symbolref_operand (x, VOIDmode));
537 if (GET_CODE (x) == CONST)
538 x = XEXP (XEXP (x, 0), 0);
539 else
540 gcc_assert (GET_CODE (x) == SYMBOL_REF);
542 if (SYMBOL_REF_ZDA_P (x))
543 fprintf (file, "r0");
544 else if (SYMBOL_REF_SDA_P (x))
545 fprintf (file, "gp");
546 else if (SYMBOL_REF_TDA_P (x))
547 fprintf (file, "ep");
548 else
549 gcc_unreachable ();
550 break;
551 case 'R': /* 2nd word of a double. */
552 switch (GET_CODE (x))
554 case REG:
555 fprintf (file, reg_names[REGNO (x) + 1]);
556 break;
557 case MEM:
558 x = XEXP (adjust_address (x, SImode, 4), 0);
559 v850_print_operand_address (file, x);
560 if (GET_CODE (x) == CONST_INT)
561 fprintf (file, "[r0]");
562 break;
564 case CONST_INT:
566 unsigned HOST_WIDE_INT v = INTVAL (x);
568 /* Trickery to avoid problems with shifting
569 32-bits at a time on a 32-bit host. */
570 v = v >> 16;
571 v = v >> 16;
572 fprintf (file, HOST_WIDE_INT_PRINT_HEX, v);
573 break;
576 case CONST_DOUBLE:
577 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_HIGH (x));
578 break;
580 default:
581 debug_rtx (x);
582 gcc_unreachable ();
584 break;
585 case 'S':
587 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
588 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
589 fputs ("s", file);
591 break;
593 case 'T':
595 /* Like an 'S' operand above, but for unsigned loads only. */
596 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
597 fputs ("s", file);
599 break;
601 case 'W': /* Print the instruction suffix. */
602 switch (GET_MODE (x))
604 default:
605 gcc_unreachable ();
607 case QImode: fputs (".b", file); break;
608 case HImode: fputs (".h", file); break;
609 case SImode: fputs (".w", file); break;
610 case SFmode: fputs (".w", file); break;
612 break;
613 case '.': /* Register r0. */
614 fputs (reg_names[0], file);
615 break;
616 case 'z': /* Reg or zero. */
617 if (REG_P (x))
618 fputs (reg_names[REGNO (x)], file);
619 else if ((GET_MODE(x) == SImode
620 || GET_MODE(x) == DFmode
621 || GET_MODE(x) == SFmode)
622 && x == CONST0_RTX(GET_MODE(x)))
623 fputs (reg_names[0], file);
624 else
626 gcc_assert (x == const0_rtx);
627 fputs (reg_names[0], file);
629 break;
630 default:
631 switch (GET_CODE (x))
633 case MEM:
634 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
635 output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
636 XEXP (x, 0)));
637 else
638 output_address (XEXP (x, 0));
639 break;
641 case REG:
642 fputs (reg_names[REGNO (x)], file);
643 break;
644 case SUBREG:
645 fputs (reg_names[subreg_regno (x)], file);
646 break;
647 case CONST_DOUBLE:
648 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
649 break;
651 case CONST_INT:
652 case SYMBOL_REF:
653 case CONST:
654 case LABEL_REF:
655 case CODE_LABEL:
656 v850_print_operand_address (file, x);
657 break;
658 default:
659 gcc_unreachable ();
661 break;
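/* A worked example of how the operand codes above combine: the move
   template "%S1ld%W1 %1,%0" used by output_move_single below prints, for an
   HImode load from a TDA variable, roughly

       %S1 -> "s"                (short sld/sst form is usable)
       %W1 -> ".h"               (mode suffix)
       %1  -> "tdaoff(sym)[ep]"  (address printed by the code below)

   i.e. something like "sld.h tdaoff(sym)[ep],r10".  The exact text depends
   on the operands; this only illustrates how the codes combine.  */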
667 /* Output assembly language output for the address ADDR to FILE. */
669 static void
670 v850_print_operand_address (FILE * file, rtx addr)
672 switch (GET_CODE (addr))
674 case REG:
675 fprintf (file, "0[");
676 v850_print_operand (file, addr, 0);
677 fprintf (file, "]");
678 break;
679 case LO_SUM:
680 if (GET_CODE (XEXP (addr, 0)) == REG)
682 /* reg,foo */
683 fprintf (file, "lo(");
684 v850_print_operand (file, XEXP (addr, 1), 0);
685 fprintf (file, ")[");
686 v850_print_operand (file, XEXP (addr, 0), 0);
687 fprintf (file, "]");
689 break;
690 case PLUS:
691 if (GET_CODE (XEXP (addr, 0)) == REG
692 || GET_CODE (XEXP (addr, 0)) == SUBREG)
694 /* reg,foo */
695 v850_print_operand (file, XEXP (addr, 1), 0);
696 fprintf (file, "[");
697 v850_print_operand (file, XEXP (addr, 0), 0);
698 fprintf (file, "]");
700 else
702 v850_print_operand (file, XEXP (addr, 0), 0);
703 fprintf (file, "+");
704 v850_print_operand (file, XEXP (addr, 1), 0);
706 break;
707 case SYMBOL_REF:
709 const char *off_name = NULL;
710 const char *reg_name = NULL;
712 if (SYMBOL_REF_ZDA_P (addr))
714 off_name = "zdaoff";
715 reg_name = "r0";
717 else if (SYMBOL_REF_SDA_P (addr))
719 off_name = "sdaoff";
720 reg_name = "gp";
722 else if (SYMBOL_REF_TDA_P (addr))
724 off_name = "tdaoff";
725 reg_name = "ep";
728 if (off_name)
729 fprintf (file, "%s(", off_name);
730 output_addr_const (file, addr);
731 if (reg_name)
732 fprintf (file, ")[%s]", reg_name);
734 break;
735 case CONST:
736 if (special_symbolref_operand (addr, VOIDmode))
738 rtx x = XEXP (XEXP (addr, 0), 0);
739 const char *off_name;
740 const char *reg_name;
742 if (SYMBOL_REF_ZDA_P (x))
744 off_name = "zdaoff";
745 reg_name = "r0";
747 else if (SYMBOL_REF_SDA_P (x))
749 off_name = "sdaoff";
750 reg_name = "gp";
752 else if (SYMBOL_REF_TDA_P (x))
754 off_name = "tdaoff";
755 reg_name = "ep";
757 else
758 gcc_unreachable ();
760 fprintf (file, "%s(", off_name);
761 output_addr_const (file, addr);
762 fprintf (file, ")[%s]", reg_name);
764 else
765 output_addr_const (file, addr);
766 break;
767 default:
768 output_addr_const (file, addr);
769 break;
773 static bool
774 v850_print_operand_punct_valid_p (unsigned char code)
776 return code == '.';
779 /* When assemble_integer is used to emit the offsets for a switch
780 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
781 output_addr_const will normally barf at this, but it is OK to omit
782 the truncate and just emit the difference of the two labels. The
783 .hword directive will automatically handle the truncation for us.
785 Returns true if rtx was handled, false otherwise. */
787 static bool
788 v850_output_addr_const_extra (FILE * file, rtx x)
790 if (GET_CODE (x) != TRUNCATE)
791 return false;
793 x = XEXP (x, 0);
795 /* We must also handle the case where the switch table was passed a
796 constant value and so has been collapsed. In this case the first
797 label will have been deleted. In such a case it is OK to emit
798 nothing, since the table will not be used.
799 (cf gcc.c-torture/compile/990801-1.c). */
800 if (GET_CODE (x) == MINUS
801 && GET_CODE (XEXP (x, 0)) == LABEL_REF)
803 rtx_code_label *label
804 = dyn_cast<rtx_code_label *> (XEXP (XEXP (x, 0), 0));
805 if (label && label->deleted ())
806 return true;
809 output_addr_const (file, x);
810 return true;
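/* For example, a switch-table entry that reaches this hook as

       (truncate:HI (minus:SI (label_ref L2) (label_ref L1)))

   is emitted simply as ".hword L2-L1"; the assembler performs the 16-bit
   truncation, so dropping the TRUNCATE here is safe.  */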
813 /* Return appropriate code to load up a 1, 2 or 4 byte integer/floating
814 point value. */
816 const char *
817 output_move_single (rtx * operands)
819 rtx dst = operands[0];
820 rtx src = operands[1];
822 if (REG_P (dst))
824 if (REG_P (src))
825 return "mov %1,%0";
827 else if (GET_CODE (src) == CONST_INT)
829 HOST_WIDE_INT value = INTVAL (src);
831 if (CONST_OK_FOR_J (value)) /* Signed 5-bit immediate. */
832 return "mov %1,%0";
834 else if (CONST_OK_FOR_K (value)) /* Signed 16-bit immediate. */
835 return "movea %1,%.,%0";
837 else if (CONST_OK_FOR_L (value)) /* Upper 16 bits were set. */
838 return "movhi hi0(%1),%.,%0";
840 /* A random constant. */
841 else if (TARGET_V850E_UP)
842 return "mov %1,%0";
843 else
844 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
847 else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
849 HOST_WIDE_INT high, low;
851 const_double_split (src, &high, &low);
853 if (CONST_OK_FOR_J (high)) /* Signed 5-bit immediate. */
854 return "mov %F1,%0";
856 else if (CONST_OK_FOR_K (high)) /* Signed 16-bit immediate. */
857 return "movea %F1,%.,%0";
859 else if (CONST_OK_FOR_L (high)) /* Upper 16 bits were set. */
860 return "movhi hi0(%F1),%.,%0";
862 /* A random constant. */
863 else if (TARGET_V850E_UP)
864 return "mov %F1,%0";
866 else
867 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
870 else if (GET_CODE (src) == MEM)
871 return "%S1ld%W1 %1,%0";
873 else if (special_symbolref_operand (src, VOIDmode))
874 return "movea %O1(%P1),%Q1,%0";
876 else if (GET_CODE (src) == LABEL_REF
877 || GET_CODE (src) == SYMBOL_REF
878 || GET_CODE (src) == CONST)
880 if (TARGET_V850E_UP)
881 return "mov hilo(%1),%0";
882 else
883 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
886 else if (GET_CODE (src) == HIGH)
887 return "movhi hi(%1),%.,%0";
889 else if (GET_CODE (src) == LO_SUM)
891 operands[2] = XEXP (src, 0);
892 operands[3] = XEXP (src, 1);
893 return "movea lo(%3),%2,%0";
897 else if (GET_CODE (dst) == MEM)
899 if (REG_P (src))
900 return "%S0st%W0 %1,%0";
902 else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
903 return "%S0st%W0 %.,%0";
905 else if (GET_CODE (src) == CONST_DOUBLE
906 && CONST0_RTX (GET_MODE (dst)) == src)
907 return "%S0st%W0 %.,%0";
910 fatal_insn ("output_move_single:", gen_rtx_SET (dst, src));
911 return "";
914 machine_mode
915 v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1 ATTRIBUTE_UNUSED)
917 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
919 switch (cond)
921 case LE:
922 return CC_FPU_LEmode;
923 case GE:
924 return CC_FPU_GEmode;
925 case LT:
926 return CC_FPU_LTmode;
927 case GT:
928 return CC_FPU_GTmode;
929 case EQ:
930 return CC_FPU_EQmode;
931 case NE:
932 return CC_FPU_NEmode;
933 default:
934 gcc_unreachable ();
937 return CCmode;
940 machine_mode
941 v850_gen_float_compare (enum rtx_code cond, machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
943 if (GET_MODE (op0) == DFmode)
945 switch (cond)
947 case LE:
948 emit_insn (gen_cmpdf_le_insn (op0, op1));
949 break;
950 case GE:
951 emit_insn (gen_cmpdf_ge_insn (op0, op1));
952 break;
953 case LT:
954 emit_insn (gen_cmpdf_lt_insn (op0, op1));
955 break;
956 case GT:
957 emit_insn (gen_cmpdf_gt_insn (op0, op1));
958 break;
959 case NE:
960 /* Note: There is no NE comparison operator. So we
961 perform an EQ comparison and invert the branch.
962 See v850_float_nz_comparison for how this is done. */
963 case EQ:
964 emit_insn (gen_cmpdf_eq_insn (op0, op1));
965 break;
966 default:
967 gcc_unreachable ();
970 else if (GET_MODE (v850_compare_op0) == SFmode)
972 switch (cond)
974 case LE:
975 emit_insn (gen_cmpsf_le_insn(op0, op1));
976 break;
977 case GE:
978 emit_insn (gen_cmpsf_ge_insn(op0, op1));
979 break;
980 case LT:
981 emit_insn (gen_cmpsf_lt_insn(op0, op1));
982 break;
983 case GT:
984 emit_insn (gen_cmpsf_gt_insn(op0, op1));
985 break;
986 case NE:
987 /* Note: There is no NE comparison operator. So we
988 perform an EQ comparison and invert the branch.
989 See v850_float_nz_comparison for how this is done. */
990 case EQ:
991 emit_insn (gen_cmpsf_eq_insn(op0, op1));
992 break;
993 default:
994 gcc_unreachable ();
997 else
998 gcc_unreachable ();
1000 return v850_select_cc_mode (cond, op0, op1);
1003 rtx
1004 v850_gen_compare (enum rtx_code cond, machine_mode mode, rtx op0, rtx op1)
1006 if (GET_MODE_CLASS(GET_MODE (op0)) != MODE_FLOAT)
1008 emit_insn (gen_cmpsi_insn (op0, op1));
1009 return gen_rtx_fmt_ee (cond, mode, gen_rtx_REG(CCmode, CC_REGNUM), const0_rtx);
1011 else
1013 rtx cc_reg;
1014 mode = v850_gen_float_compare (cond, mode, op0, op1);
1015 cc_reg = gen_rtx_REG (mode, CC_REGNUM);
1016 emit_insn (gen_rtx_SET (cc_reg, gen_rtx_REG (mode, FCC_REGNUM)));
1018 return gen_rtx_fmt_ee (cond, mode, cc_reg, const0_rtx);
1022 /* Return maximum offset supported for a short EP memory reference of mode
1023 MODE and signedness UNSIGNEDP. */
1025 static int
1026 ep_memory_offset (machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
1028 int max_offset = 0;
1030 switch (mode)
1032 case QImode:
1033 if (TARGET_SMALL_SLD)
1034 max_offset = (1 << 4);
1035 else if ((TARGET_V850E_UP)
1036 && unsignedp)
1037 max_offset = (1 << 4);
1038 else
1039 max_offset = (1 << 7);
1040 break;
1042 case HImode:
1043 if (TARGET_SMALL_SLD)
1044 max_offset = (1 << 5);
1045 else if ((TARGET_V850E_UP)
1046 && unsignedp)
1047 max_offset = (1 << 5);
1048 else
1049 max_offset = (1 << 8);
1050 break;
1052 case SImode:
1053 case SFmode:
1054 max_offset = (1 << 8);
1055 break;
1057 default:
1058 break;
1061 return max_offset;
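/* Concretely, the limits above mean: with the default settings a byte
   access may use the short EP-relative form for offsets below 128, and
   halfword/word accesses for offsets below 256.  With -msmall-sld (or for
   unsigned loads on V850E parts) the byte and halfword ranges shrink to 16
   and 32 bytes, matching the smaller displacement fields of those sld/sst
   encodings.  These figures are just the switch above spelled out.  */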
1064 /* Return true if OP is a valid short EP memory reference. */
1066 int
1067 ep_memory_operand (rtx op, machine_mode mode, int unsigned_load)
1069 rtx addr, op0, op1;
1070 int max_offset;
1071 int mask;
1073 /* If we are not using the EP register on a per-function basis
1074 then do not allow this optimization at all. This is to
1075 prevent the use of the SLD/SST instructions which cannot be
1076 guaranteed to work properly due to a hardware bug. */
1077 if (!TARGET_EP)
1078 return FALSE;
1080 if (GET_CODE (op) != MEM)
1081 return FALSE;
1083 max_offset = ep_memory_offset (mode, unsigned_load);
1085 mask = GET_MODE_SIZE (mode) - 1;
1087 addr = XEXP (op, 0);
1088 if (GET_CODE (addr) == CONST)
1089 addr = XEXP (addr, 0);
1091 switch (GET_CODE (addr))
1093 default:
1094 break;
1096 case SYMBOL_REF:
1097 return SYMBOL_REF_TDA_P (addr);
1099 case REG:
1100 return REGNO (addr) == EP_REGNUM;
1102 case PLUS:
1103 op0 = XEXP (addr, 0);
1104 op1 = XEXP (addr, 1);
1105 if (GET_CODE (op1) == CONST_INT
1106 && INTVAL (op1) < max_offset
1107 && INTVAL (op1) >= 0
1108 && (INTVAL (op1) & mask) == 0)
1110 if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
1111 return TRUE;
1113 if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
1114 return TRUE;
1116 break;
1119 return FALSE;
1122 /* Substitute memory references involving a pointer, to use the ep pointer,
1123 taking care to save the old ep value and restore it afterwards. */
1125 static void
1126 substitute_ep_register (rtx_insn *first_insn,
1127 rtx_insn *last_insn,
1128 int uses,
1129 int regno,
1130 rtx * p_r1,
1131 rtx * p_ep)
1133 rtx reg = gen_rtx_REG (Pmode, regno);
1134 rtx_insn *insn;
1136 if (!*p_r1)
1138 df_set_regs_ever_live (1, true);
1139 *p_r1 = gen_rtx_REG (Pmode, 1);
1140 *p_ep = gen_rtx_REG (Pmode, 30);
1143 if (TARGET_DEBUG)
1144 fprintf (stderr, "\
1145 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1146 2 * (uses - 3), uses, reg_names[regno],
1147 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
1148 INSN_UID (first_insn), INSN_UID (last_insn));
1150 if (NOTE_P (first_insn))
1151 first_insn = next_nonnote_insn (first_insn);
1153 last_insn = next_nonnote_insn (last_insn);
1154 for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
1156 if (NONJUMP_INSN_P (insn))
1158 rtx pattern = single_set (insn);
1160 /* Replace the memory references. */
1161 if (pattern)
1163 rtx *p_mem;
1164 /* Memory operands are signed by default. */
1165 int unsignedp = FALSE;
1167 if (GET_CODE (SET_DEST (pattern)) == MEM
1168 && GET_CODE (SET_SRC (pattern)) == MEM)
1169 p_mem = (rtx *)0;
1171 else if (GET_CODE (SET_DEST (pattern)) == MEM)
1172 p_mem = &SET_DEST (pattern);
1174 else if (GET_CODE (SET_SRC (pattern)) == MEM)
1175 p_mem = &SET_SRC (pattern);
1177 else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
1178 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1179 p_mem = &XEXP (SET_SRC (pattern), 0);
1181 else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
1182 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1184 p_mem = &XEXP (SET_SRC (pattern), 0);
1185 unsignedp = TRUE;
1187 else
1188 p_mem = (rtx *)0;
1190 if (p_mem)
1192 rtx addr = XEXP (*p_mem, 0);
1194 if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
1195 *p_mem = change_address (*p_mem, VOIDmode, *p_ep);
1197 else if (GET_CODE (addr) == PLUS
1198 && GET_CODE (XEXP (addr, 0)) == REG
1199 && REGNO (XEXP (addr, 0)) == (unsigned) regno
1200 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1201 && ((INTVAL (XEXP (addr, 1)))
1202 < ep_memory_offset (GET_MODE (*p_mem),
1203 unsignedp))
1204 && ((INTVAL (XEXP (addr, 1))) >= 0))
1205 *p_mem = change_address (*p_mem, VOIDmode,
1206 gen_rtx_PLUS (Pmode,
1207 *p_ep,
1208 XEXP (addr, 1)));
1214 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1215 insn = prev_nonnote_insn (first_insn);
1216 if (insn && NONJUMP_INSN_P (insn)
1217 && GET_CODE (PATTERN (insn)) == SET
1218 && SET_DEST (PATTERN (insn)) == *p_ep
1219 && SET_SRC (PATTERN (insn)) == *p_r1)
1220 delete_insn (insn);
1221 else
1222 emit_insn_before (gen_rtx_SET (*p_r1, *p_ep), first_insn);
1224 emit_insn_before (gen_rtx_SET (*p_ep, reg), first_insn);
1225 emit_insn_before (gen_rtx_SET (*p_ep, *p_r1), last_insn);
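/* Sketch of the transformation performed above: a run of insns that
   repeatedly goes through one pointer register, e.g. (pseudo assembly, for
   illustration only)

       ld.w 0[r12],r10 ; st.w r10,4[r12] ; ld.w 8[r12],r11

   becomes, after the substitution,

       mov r30,r1        save old ep in r1 (unless the copy can be deleted)
       mov r12,ep
       sld.w 0[ep],r10 ; sst.w r10,4[ep] ; sld.w 8[ep],r11
       mov r1,ep         restore ep

   The short sld/sst forms are what actually saves space; the moves here
   correspond to the three emit_insn_before calls above.  */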
1229 /* TARGET_MACHINE_DEPENDENT_REORG. On the V850, we use it to implement
1230 the -mep mode: copy heavily used pointers into the ep register so that
1231 the implicit short addressing forms can be used. */
1233 static void
1234 v850_reorg (void)
1236 struct
1238 int uses;
1239 rtx_insn *first_insn;
1240 rtx_insn *last_insn;
1242 regs[FIRST_PSEUDO_REGISTER];
1244 int i;
1245 int use_ep = FALSE;
1246 rtx r1 = NULL_RTX;
1247 rtx ep = NULL_RTX;
1248 rtx_insn *insn;
1249 rtx pattern;
1251 /* If not ep mode, just return now. */
1252 if (!TARGET_EP)
1253 return;
1255 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1257 regs[i].uses = 0;
1258 regs[i].first_insn = NULL;
1259 regs[i].last_insn = NULL;
1262 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
1264 switch (GET_CODE (insn))
1266 /* End of basic block */
1267 default:
1268 if (!use_ep)
1270 int max_uses = -1;
1271 int max_regno = -1;
1273 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1275 if (max_uses < regs[i].uses)
1277 max_uses = regs[i].uses;
1278 max_regno = i;
1282 if (max_uses > 3)
1283 substitute_ep_register (regs[max_regno].first_insn,
1284 regs[max_regno].last_insn,
1285 max_uses, max_regno, &r1, &ep);
1288 use_ep = FALSE;
1289 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1291 regs[i].uses = 0;
1292 regs[i].first_insn = NULL;
1293 regs[i].last_insn = NULL;
1295 break;
1297 case NOTE:
1298 break;
1300 case INSN:
1301 pattern = single_set (insn);
1303 /* See if there are any memory references we can shorten. */
1304 if (pattern)
1306 rtx src = SET_SRC (pattern);
1307 rtx dest = SET_DEST (pattern);
1308 rtx mem;
1309 /* Memory operands are signed by default. */
1310 int unsignedp = FALSE;
1312 /* We might have (SUBREG (MEM)) here, so just get rid of the
1313 subregs to make this code simpler. */
1314 if (GET_CODE (dest) == SUBREG
1315 && (GET_CODE (SUBREG_REG (dest)) == MEM
1316 || GET_CODE (SUBREG_REG (dest)) == REG))
1317 alter_subreg (&dest, false);
1318 if (GET_CODE (src) == SUBREG
1319 && (GET_CODE (SUBREG_REG (src)) == MEM
1320 || GET_CODE (SUBREG_REG (src)) == REG))
1321 alter_subreg (&src, false);
1323 if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
1324 mem = NULL_RTX;
1326 else if (GET_CODE (dest) == MEM)
1327 mem = dest;
1329 else if (GET_CODE (src) == MEM)
1330 mem = src;
1332 else if (GET_CODE (src) == SIGN_EXTEND
1333 && GET_CODE (XEXP (src, 0)) == MEM)
1334 mem = XEXP (src, 0);
1336 else if (GET_CODE (src) == ZERO_EXTEND
1337 && GET_CODE (XEXP (src, 0)) == MEM)
1339 mem = XEXP (src, 0);
1340 unsignedp = TRUE;
1342 else
1343 mem = NULL_RTX;
1345 if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
1346 use_ep = TRUE;
1348 else if (!use_ep && mem
1349 && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
1351 rtx addr = XEXP (mem, 0);
1352 int regno = -1;
1353 int short_p;
1355 if (GET_CODE (addr) == REG)
1357 short_p = TRUE;
1358 regno = REGNO (addr);
1361 else if (GET_CODE (addr) == PLUS
1362 && GET_CODE (XEXP (addr, 0)) == REG
1363 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1364 && ((INTVAL (XEXP (addr, 1)))
1365 < ep_memory_offset (GET_MODE (mem), unsignedp))
1366 && ((INTVAL (XEXP (addr, 1))) >= 0))
1368 short_p = TRUE;
1369 regno = REGNO (XEXP (addr, 0));
1372 else
1373 short_p = FALSE;
1375 if (short_p)
1377 regs[regno].uses++;
1378 regs[regno].last_insn = insn;
1379 if (!regs[regno].first_insn)
1380 regs[regno].first_insn = insn;
1384 /* Loading up a register in the basic block zaps any savings
1385 for the register */
1386 if (GET_CODE (dest) == REG)
1388 machine_mode mode = GET_MODE (dest);
1389 int regno;
1390 int endregno;
1392 regno = REGNO (dest);
1393 endregno = regno + HARD_REGNO_NREGS (regno, mode);
1395 if (!use_ep)
1397 /* See if we can use the pointer before this
1398 modification. */
1399 int max_uses = -1;
1400 int max_regno = -1;
1402 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1404 if (max_uses < regs[i].uses)
1406 max_uses = regs[i].uses;
1407 max_regno = i;
1411 if (max_uses > 3
1412 && max_regno >= regno
1413 && max_regno < endregno)
1415 substitute_ep_register (regs[max_regno].first_insn,
1416 regs[max_regno].last_insn,
1417 max_uses, max_regno, &r1,
1418 &ep);
1420 /* Since we made a substitution, zap all remembered
1421 registers. */
1422 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1424 regs[i].uses = 0;
1425 regs[i].first_insn = NULL;
1426 regs[i].last_insn = NULL;
1431 for (i = regno; i < endregno; i++)
1433 regs[i].uses = 0;
1434 regs[i].first_insn = NULL;
1435 regs[i].last_insn = NULL;
1443 /* # of registers saved by the interrupt handler. */
1444 #define INTERRUPT_FIXED_NUM 5
1446 /* # of bytes for registers saved by the interrupt handler. */
1447 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1449 /* # of words saved for other registers. */
1450 #define INTERRUPT_ALL_SAVE_NUM \
1451 (30 - INTERRUPT_FIXED_NUM)
1453 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1455 int
1456 compute_register_save_size (long * p_reg_saved)
1458 int size = 0;
1459 int i;
1460 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1461 int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
1462 long reg_saved = 0;
1464 /* Count space for the register saves. */
1465 if (interrupt_handler)
1467 for (i = 0; i <= 31; i++)
1468 switch (i)
1470 default:
1471 if (df_regs_ever_live_p (i) || call_p)
1473 size += 4;
1474 reg_saved |= 1L << i;
1476 break;
1478 /* We don't save/restore r0 or the stack pointer */
1479 case 0:
1480 case STACK_POINTER_REGNUM:
1481 break;
1483 /* For registers with fixed use, we save them, set them to the
1484 appropriate value, and then restore them.
1485 These registers are handled specially, so don't list them
1486 on the list of registers to save in the prologue. */
1487 case 1: /* temp used to hold ep */
1488 case 4: /* gp */
1489 case 10: /* temp used to call interrupt save/restore */
1490 case 11: /* temp used to call interrupt save/restore (long call) */
1491 case EP_REGNUM: /* ep */
1492 size += 4;
1493 break;
1496 else
1498 /* Find the first register that needs to be saved. */
1499 for (i = 0; i <= 31; i++)
1500 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1501 || i == LINK_POINTER_REGNUM))
1502 break;
1504 /* If it is possible that an out-of-line helper function might be
1505 used to generate the prologue for the current function, then we
1506 need to cover the possibility that such a helper function will
1507 be used, despite the fact that there might be gaps in the list of
1508 registers that need to be saved. To detect this we note that the
1509 helper functions always push at least register r29 (provided
1510 that the function is not an interrupt handler). */
1512 if (TARGET_PROLOG_FUNCTION
1513 && (i == 2 || ((i >= 20) && (i < 30))))
1515 if (i == 2)
1517 size += 4;
1518 reg_saved |= 1L << i;
1520 i = 20;
1523 /* Helper functions save all registers between the starting
1524 register and the last register, regardless of whether they
1525 are actually used by the function or not. */
1526 for (; i <= 29; i++)
1528 size += 4;
1529 reg_saved |= 1L << i;
1532 if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
1534 size += 4;
1535 reg_saved |= 1L << LINK_POINTER_REGNUM;
1538 else
1540 for (; i <= 31; i++)
1541 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1542 || i == LINK_POINTER_REGNUM))
1544 size += 4;
1545 reg_saved |= 1L << i;
1550 if (p_reg_saved)
1551 *p_reg_saved = reg_saved;
1553 return size;
1556 /* Typical stack layout should look like this after the function's prologue:
1558                             |    |
1559                               --                       ^
1560                             |    | \                   |
1561                             |    |   arguments saved   | Increasing
1562                             |    |   on the stack      |  addresses
1563     PARENT   arg pointer -> |    | /
1564   -------------------------- ---- -------------------
1565                             |    | - space for argument split between regs & stack
1566                               --
1567     CHILD                   |    | \        <-- (return address here)
1568                             |    |   other call
1569                             |    |   saved registers
1570                             |    | /
1571                               --
1572         frame pointer ->    |    | \                   ___
1573                             |    |   local             |
1574                             |    |   variables         |f
1575                             |    | /                   |r
1576                               --                       |a
1577                             |    | \                   |m
1578                             |    |   outgoing          |e
1579                             |    |   arguments         |    | Decreasing
1580    (hard) frame pointer     |    | /                   |    |  addresses
1581       and stack pointer ->  |    | /                  _|_   |
1582   -------------------------- ---- ------------------       V  */
1584 int
1585 compute_frame_size (int size, long * p_reg_saved)
1587 return (size
1588 + compute_register_save_size (p_reg_saved)
1589 + crtl->outgoing_args_size);
1592 static int
1593 use_prolog_function (int num_save, int frame_size)
1595 int alloc_stack = (4 * num_save);
1596 int unalloc_stack = frame_size - alloc_stack;
1597 int save_func_len, restore_func_len;
1598 int save_normal_len, restore_normal_len;
1600 if (! TARGET_DISABLE_CALLT)
1601 save_func_len = restore_func_len = 2;
1602 else
1603 save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;
1605 if (unalloc_stack)
1607 save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1608 restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1611 /* See if we would have used ep to save the stack. */
1612 if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
1613 save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
1614 else
1615 save_normal_len = restore_normal_len = 4 * num_save;
1617 save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
1618 restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;
1620 /* Don't bother checking if we don't actually save any space.
1621 This happens for instance if one register is saved and additional
1622 stack space is allocated. */
1623 return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
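/* In effect the comparison above asks: are the out-of-line helper calls
   (2 bytes each with callt, 4 with jarl/jr, more under -mlong-calls), plus
   any extra stack adjustment, smaller than saving and restoring every
   register inline at roughly 4 bytes per register (about 2 via ep for small
   frames), plus the frame adjustments?  E.g. five saved registers cost
   about 20 bytes of inline stores versus a handful of bytes of helper
   calls, so the helpers win.  The lengths used here are size estimates,
   not exact instruction encodings.  */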
1626 static void
1627 increment_stack (signed int amount, bool in_prologue)
1629 rtx inc;
1631 if (amount == 0)
1632 return;
1634 inc = GEN_INT (amount);
1636 if (! CONST_OK_FOR_K (amount))
1638 rtx reg = gen_rtx_REG (Pmode, 12);
1640 inc = emit_move_insn (reg, inc);
1641 if (in_prologue)
1642 F (inc);
1643 inc = reg;
1646 inc = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, inc));
1647 if (in_prologue)
1648 F (inc);
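/* Note the r12 path above: an adjustment that does not fit the signed
   16-bit range (CONST_OK_FOR_K), say a 100000-byte frame, is first loaded
   into r12 and then added to the stack pointer, costing an extra constant
   load before the stack-pointer update.  */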
1651 void
1652 expand_prologue (void)
1654 unsigned int i;
1655 unsigned int size = get_frame_size ();
1656 unsigned int actual_fsize;
1657 unsigned int init_stack_alloc = 0;
1658 rtx save_regs[32];
1659 rtx save_all;
1660 unsigned int num_save;
1661 int code;
1662 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1663 long reg_saved = 0;
1665 actual_fsize = compute_frame_size (size, &reg_saved);
1667 if (flag_stack_usage_info)
1668 current_function_static_stack_size = actual_fsize;
1670 /* Save/setup global registers for interrupt functions right now. */
1671 if (interrupt_handler)
1673 if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
1674 emit_insn (gen_callt_save_interrupt ());
1675 else
1676 emit_insn (gen_save_interrupt ());
1678 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1680 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1681 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1683 /* Interrupt functions are not passed arguments, so no need to
1684 allocate space for split structure arguments. */
1685 gcc_assert (crtl->args.pretend_args_size == 0);
1688 /* Identify all of the saved registers. */
1689 num_save = 0;
1690 for (i = 1; i < 32; i++)
1692 if (((1L << i) & reg_saved) != 0)
1693 save_regs[num_save++] = gen_rtx_REG (Pmode, i);
1696 if (crtl->args.pretend_args_size)
1698 if (num_save == 0)
1700 increment_stack (- (actual_fsize + crtl->args.pretend_args_size), true);
1701 actual_fsize = 0;
1703 else
1704 increment_stack (- crtl->args.pretend_args_size, true);
1707 /* See if we have an insn that allocates stack space and saves the particular
1708 registers we want to. Note that the helpers won't
1709 allocate additional space for registers GCC saves to complete a
1710 "split" structure argument. */
1711 save_all = NULL_RTX;
1712 if (TARGET_PROLOG_FUNCTION
1713 && !crtl->args.pretend_args_size
1714 && num_save > 0)
1716 if (use_prolog_function (num_save, actual_fsize))
1718 int alloc_stack = 4 * num_save;
1719 int offset = 0;
1721 save_all = gen_rtx_PARALLEL
1722 (VOIDmode,
1723 rtvec_alloc (num_save + 1
1724 + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));
1726 XVECEXP (save_all, 0, 0)
1727 = gen_rtx_SET (stack_pointer_rtx,
1728 gen_rtx_PLUS (Pmode,
1729 stack_pointer_rtx,
1730 GEN_INT(-alloc_stack)));
1731 for (i = 0; i < num_save; i++)
1733 offset -= 4;
1734 XVECEXP (save_all, 0, i+1)
1735 = gen_rtx_SET (gen_rtx_MEM (Pmode,
1736 gen_rtx_PLUS (Pmode,
1737 stack_pointer_rtx,
1738 GEN_INT(offset))),
1739 save_regs[i]);
1742 if (TARGET_DISABLE_CALLT)
1744 XVECEXP (save_all, 0, num_save + 1)
1745 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));
1747 if (TARGET_LONG_CALLS)
1748 XVECEXP (save_all, 0, num_save + 2)
1749 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
1752 v850_all_frame_related (save_all);
1754 code = recog (save_all, NULL_RTX, NULL);
1755 if (code >= 0)
1757 rtx insn = emit_insn (save_all);
1758 INSN_CODE (insn) = code;
1759 actual_fsize -= alloc_stack;
1762 else
1763 save_all = NULL_RTX;
1767 /* If no prolog save function is available, store the registers the old
1768 fashioned way (one by one). */
1769 if (!save_all)
1771 /* Special case interrupt functions that save all registers for a call. */
1772 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1774 if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
1775 emit_insn (gen_callt_save_all_interrupt ());
1776 else
1777 emit_insn (gen_save_all_interrupt ());
1779 else
1781 int offset;
1782 /* If the stack is too big, allocate it in chunks so we can do the
1783 register saves. We use the register save size so that we can use the ep
1784 register. */
1785 if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1786 init_stack_alloc = compute_register_save_size (NULL);
1787 else
1788 init_stack_alloc = actual_fsize;
1790 /* Save registers at the beginning of the stack frame. */
1791 offset = init_stack_alloc - 4;
1793 if (init_stack_alloc)
1794 increment_stack (- (signed) init_stack_alloc, true);
1796 /* Save the return pointer first. */
1797 if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
1799 F (emit_move_insn (gen_rtx_MEM (SImode,
1800 plus_constant (Pmode,
1801 stack_pointer_rtx,
1802 offset)),
1803 save_regs[--num_save]));
1804 offset -= 4;
1807 for (i = 0; i < num_save; i++)
1809 F (emit_move_insn (gen_rtx_MEM (SImode,
1810 plus_constant (Pmode,
1811 stack_pointer_rtx,
1812 offset)),
1813 save_regs[i]));
1814 offset -= 4;
1819 /* Allocate the rest of the stack that was not allocated above (either it is
1820 > 32K or we just called a function to save the registers and needed more
1821 stack). */
1822 if (actual_fsize > init_stack_alloc)
1823 increment_stack (init_stack_alloc - actual_fsize, true);
1825 /* If we need a frame pointer, set it up now. */
1826 if (frame_pointer_needed)
1827 F (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
1831 void
1832 expand_epilogue (void)
1834 unsigned int i;
1835 unsigned int size = get_frame_size ();
1836 long reg_saved = 0;
1837 int actual_fsize = compute_frame_size (size, &reg_saved);
1838 rtx restore_regs[32];
1839 rtx restore_all;
1840 unsigned int num_restore;
1841 int code;
1842 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1844 /* Eliminate the initial stack stored by interrupt functions. */
1845 if (interrupt_handler)
1847 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1848 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1849 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1852 /* Cut off any dynamic stack created. */
1853 if (frame_pointer_needed)
1854 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1856 /* Identify all of the saved registers. */
1857 num_restore = 0;
1858 for (i = 1; i < 32; i++)
1860 if (((1L << i) & reg_saved) != 0)
1861 restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
1864 /* See if we have an insn that restores the particular registers we
1865 want to. */
1866 restore_all = NULL_RTX;
1868 if (TARGET_PROLOG_FUNCTION
1869 && num_restore > 0
1870 && !crtl->args.pretend_args_size
1871 && !interrupt_handler)
1873 int alloc_stack = (4 * num_restore);
1875 /* Don't bother checking if we don't actually save any space. */
1876 if (use_prolog_function (num_restore, actual_fsize))
1878 int offset;
1879 restore_all = gen_rtx_PARALLEL (VOIDmode,
1880 rtvec_alloc (num_restore + 2));
1881 XVECEXP (restore_all, 0, 0) = ret_rtx;
1882 XVECEXP (restore_all, 0, 1)
1883 = gen_rtx_SET (stack_pointer_rtx,
1884 gen_rtx_PLUS (Pmode,
1885 stack_pointer_rtx,
1886 GEN_INT (alloc_stack)));
1888 offset = alloc_stack - 4;
1889 for (i = 0; i < num_restore; i++)
1891 XVECEXP (restore_all, 0, i+2)
1892 = gen_rtx_SET (restore_regs[i],
1893 gen_rtx_MEM (Pmode,
1894 gen_rtx_PLUS (Pmode,
1895 stack_pointer_rtx,
1896 GEN_INT(offset))));
1897 offset -= 4;
1900 code = recog (restore_all, NULL_RTX, NULL);
1902 if (code >= 0)
1904 rtx insn;
1906 actual_fsize -= alloc_stack;
1907 increment_stack (actual_fsize, false);
1909 insn = emit_jump_insn (restore_all);
1910 INSN_CODE (insn) = code;
1912 else
1913 restore_all = NULL_RTX;
1917 /* If no epilogue save function is available, restore the registers the
1918 old fashioned way (one by one). */
1919 if (!restore_all)
1921 unsigned int init_stack_free;
1923 /* If the stack is large, we need to cut it down in 2 pieces. */
1924 if (interrupt_handler)
1925 init_stack_free = 0;
1926 else if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1927 init_stack_free = 4 * num_restore;
1928 else
1929 init_stack_free = (signed) actual_fsize;
1931 /* Deallocate the rest of the stack if it is > 32K. */
1932 if ((unsigned int) actual_fsize > init_stack_free)
1933 increment_stack (actual_fsize - init_stack_free, false);
1935 /* Special case interrupt functions that save all registers
1936 for a call. */
1937 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1939 if (! TARGET_DISABLE_CALLT)
1940 emit_insn (gen_callt_restore_all_interrupt ());
1941 else
1942 emit_insn (gen_restore_all_interrupt ());
1944 else
1946 /* Restore registers from the beginning of the stack frame. */
1947 int offset = init_stack_free - 4;
1949 /* Restore the return pointer first. */
1950 if (num_restore > 0
1951 && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
1953 emit_move_insn (restore_regs[--num_restore],
1954 gen_rtx_MEM (SImode,
1955 plus_constant (Pmode,
1956 stack_pointer_rtx,
1957 offset)));
1958 offset -= 4;
1961 for (i = 0; i < num_restore; i++)
1963 emit_move_insn (restore_regs[i],
1964 gen_rtx_MEM (SImode,
1965 plus_constant (Pmode,
1966 stack_pointer_rtx,
1967 offset)));
1969 emit_use (restore_regs[i]);
1970 offset -= 4;
1973 /* Cut back the remainder of the stack. */
1974 increment_stack (init_stack_free + crtl->args.pretend_args_size,
1975 false);
1978 /* And return or use reti for interrupt handlers. */
1979 if (interrupt_handler)
1981 if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
1982 emit_insn (gen_callt_return_interrupt ());
1983 else
1984 emit_jump_insn (gen_return_interrupt ());
1986 else if (actual_fsize)
1987 emit_jump_insn (gen_return_internal ());
1988 else
1989 emit_jump_insn (gen_return_simple ());
1992 v850_interrupt_cache_p = FALSE;
1993 v850_interrupt_p = FALSE;
1996 /* Update the condition code from the insn. */
1997 void
1998 notice_update_cc (rtx body, rtx_insn *insn)
2000 switch (get_attr_cc (insn))
2002 case CC_NONE:
2003 /* Insn does not affect CC at all. */
2004 break;
2006 case CC_NONE_0HIT:
2007 /* Insn does not change CC, but the 0'th operand has been changed. */
2008 if (cc_status.value1 != 0
2009 && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
2010 cc_status.value1 = 0;
2011 break;
2013 case CC_SET_ZN:
2014 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
2015 V and C are in an unusable state. */
2016 CC_STATUS_INIT;
2017 cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
2018 cc_status.value1 = recog_data.operand[0];
2019 break;
2021 case CC_SET_ZNV:
2022 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
2023 C is in an unusable state. */
2024 CC_STATUS_INIT;
2025 cc_status.flags |= CC_NO_CARRY;
2026 cc_status.value1 = recog_data.operand[0];
2027 break;
2029 case CC_COMPARE:
2030 /* The insn is a compare instruction. */
2031 CC_STATUS_INIT;
2032 cc_status.value1 = SET_SRC (body);
2033 break;
2035 case CC_CLOBBER:
2036 /* Insn doesn't leave CC in a usable state. */
2037 CC_STATUS_INIT;
2038 break;
2040 default:
2041 break;
2045 /* Retrieve the data area that has been chosen for the given decl. */
2047 v850_data_area
2048 v850_get_data_area (tree decl)
2050 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2051 return DATA_AREA_SDA;
2053 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2054 return DATA_AREA_TDA;
2056 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2057 return DATA_AREA_ZDA;
2059 return DATA_AREA_NORMAL;
2062 /* Store the indicated data area in the decl's attributes. */
2064 static void
2065 v850_set_data_area (tree decl, v850_data_area data_area)
2067 tree name;
2069 switch (data_area)
2071 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2072 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2073 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2074 default:
2075 return;
2078 DECL_ATTRIBUTES (decl) = tree_cons
2079 (name, NULL, DECL_ATTRIBUTES (decl));
2082 /* Handle an "interrupt" attribute; arguments as in
2083 struct attribute_spec.handler. */
2084 static tree
2085 v850_handle_interrupt_attribute (tree * node,
2086 tree name,
2087 tree args ATTRIBUTE_UNUSED,
2088 int flags ATTRIBUTE_UNUSED,
2089 bool * no_add_attrs)
2091 if (TREE_CODE (*node) != FUNCTION_DECL)
2093 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2094 name);
2095 *no_add_attrs = true;
2098 return NULL_TREE;
2101 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2102 struct attribute_spec.handler. */
2103 static tree
2104 v850_handle_data_area_attribute (tree* node,
2105 tree name,
2106 tree args ATTRIBUTE_UNUSED,
2107 int flags ATTRIBUTE_UNUSED,
2108 bool * no_add_attrs)
2110 v850_data_area data_area;
2111 v850_data_area area;
2112 tree decl = *node;
2114 /* Implement data area attribute. */
2115 if (is_attribute_p ("sda", name))
2116 data_area = DATA_AREA_SDA;
2117 else if (is_attribute_p ("tda", name))
2118 data_area = DATA_AREA_TDA;
2119 else if (is_attribute_p ("zda", name))
2120 data_area = DATA_AREA_ZDA;
2121 else
2122 gcc_unreachable ();
2124 switch (TREE_CODE (decl))
2126 case VAR_DECL:
2127 if (current_function_decl != NULL_TREE)
2129 error_at (DECL_SOURCE_LOCATION (decl),
2130 "data area attributes cannot be specified for "
2131 "local variables");
2132 *no_add_attrs = true;
2135 /* Drop through. */
2137 case FUNCTION_DECL:
2138 area = v850_get_data_area (decl);
2139 if (area != DATA_AREA_NORMAL && data_area != area)
2141 error ("data area of %q+D conflicts with previous declaration",
2142 decl);
2143 *no_add_attrs = true;
2145 break;
2147 default:
2148 break;
2151 return NULL_TREE;
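/* Typical usage of these attributes, for reference (the variable names are
   only examples):

       int   counter __attribute__ ((sda));    small data area, gp-relative
       char  flag    __attribute__ ((zda));    zero data area, r0-relative
       short temp    __attribute__ ((tda));    tiny data area, ep-relative

   Objects can also land in these areas automatically through the
   -msda=N/-mtda=N/-mzda=N size thresholds handled in v850_encode_data_area
   below.  */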
2155 /* Return nonzero if FUNC is an interrupt function as specified
2156 by the "interrupt" attribute. */
2158 int
2159 v850_interrupt_function_p (tree func)
2161 tree a;
2162 int ret = 0;
2164 if (v850_interrupt_cache_p)
2165 return v850_interrupt_p;
2167 if (TREE_CODE (func) != FUNCTION_DECL)
2168 return 0;
2170 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2171 if (a != NULL_TREE)
2172 ret = 1;
2174 else
2176 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2177 ret = a != NULL_TREE;
2180 /* It's not safe to trust global variables until after function inlining has
2181 been done. */
2182 if (reload_completed | reload_in_progress)
2183 v850_interrupt_p = ret;
2185 return ret;
2189 static void
2190 v850_encode_data_area (tree decl, rtx symbol)
2192 int flags;
2194 /* Map explicit sections into the appropriate attribute */
2195 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2197 if (DECL_SECTION_NAME (decl))
2199 const char *name = DECL_SECTION_NAME (decl);
2201 if (streq (name, ".zdata") || streq (name, ".zbss"))
2202 v850_set_data_area (decl, DATA_AREA_ZDA);
2204 else if (streq (name, ".sdata") || streq (name, ".sbss"))
2205 v850_set_data_area (decl, DATA_AREA_SDA);
2207 else if (streq (name, ".tdata"))
2208 v850_set_data_area (decl, DATA_AREA_TDA);
2211 /* If no attribute, support -m{zda,sda,tda}=n */
2212 else
2214 int size = int_size_in_bytes (TREE_TYPE (decl));
2215 if (size <= 0)
2218 else if (size <= small_memory_max [(int) SMALL_MEMORY_TDA])
2219 v850_set_data_area (decl, DATA_AREA_TDA);
2221 else if (size <= small_memory_max [(int) SMALL_MEMORY_SDA])
2222 v850_set_data_area (decl, DATA_AREA_SDA);
2224 else if (size <= small_memory_max [(int) SMALL_MEMORY_ZDA])
2225 v850_set_data_area (decl, DATA_AREA_ZDA);
2228 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2229 return;
2232 flags = SYMBOL_REF_FLAGS (symbol);
2233 switch (v850_get_data_area (decl))
2235 case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
2236 case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
2237 case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
2238 default: gcc_unreachable ();
2240 SYMBOL_REF_FLAGS (symbol) = flags;
2243 static void
2244 v850_encode_section_info (tree decl, rtx rtl, int first)
2246 default_encode_section_info (decl, rtl, first);
2248 if (TREE_CODE (decl) == VAR_DECL
2249 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2250 v850_encode_data_area (decl, XEXP (rtl, 0));
2253 /* Construct a JR instruction to a routine that will perform the equivalent of
2254 the RTL passed in as an argument. This RTL is a function epilogue that
2255 pops registers off the stack and possibly releases some extra stack space
2256 as well. The code has already verified that the RTL matches these
2257 requirements. */
2259 char *
2260 construct_restore_jr (rtx op)
2262 int count = XVECLEN (op, 0);
2263 int stack_bytes;
2264 unsigned long int mask;
2265 unsigned long int first;
2266 unsigned long int last;
2267 int i;
2268 static char buff [100]; /* XXX */
2270 if (count <= 2)
2272 error ("bogus JR construction: %d", count);
2273 return NULL;
2276 /* Work out how many bytes to pop off the stack before retrieving
2277 registers. */
2278 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2279 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2280 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2282 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2284 /* Each pop will remove 4 bytes from the stack.... */
2285 stack_bytes -= (count - 2) * 4;
2287 /* Make sure that the amount we are popping is either 0 or 16 bytes. */
2288 if (stack_bytes != 0)
2290 error ("bad amount of stack space removal: %d", stack_bytes);
2291 return NULL;
2294 /* Now compute the bit mask of registers to pop. */
2295 mask = 0;
2296 for (i = 2; i < count; i++)
2298 rtx vector_element = XVECEXP (op, 0, i);
2300 gcc_assert (GET_CODE (vector_element) == SET);
2301 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2302 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2303 SImode));
2305 mask |= 1 << REGNO (SET_DEST (vector_element));
2308 /* Scan for the first register to pop. */
2309 for (first = 0; first < 32; first++)
2311 if (mask & (1 << first))
2312 break;
2315 gcc_assert (first < 32);
2317 /* Discover the last register to pop. */
2318 if (mask & (1 << LINK_POINTER_REGNUM))
2320 last = LINK_POINTER_REGNUM;
2322 else
2324 gcc_assert (!stack_bytes);
2325 gcc_assert (mask & (1 << 29));
2327 last = 29;
2330 /* Note, it is possible to have gaps in the register mask.
2331 We ignore this here, and generate a JR anyway. We will
2332 be popping more registers than is strictly necessary, but
2333 it does save code space. */
2335 if (TARGET_LONG_CALLS)
2337 char name[40];
2339 if (first == last)
2340 sprintf (name, "__return_%s", reg_names [first]);
2341 else
2342 sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);
2344 sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2345 name, name);
2347 else
2349 if (first == last)
2350 sprintf (buff, "jr __return_%s", reg_names [first]);
2351 else
2352 sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
2355 return buff;
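/* So, for example, a mask whose first register is r29 and whose last is the
   link pointer typically produces

       jr __return_r29_r31

   (register names as printed from reg_names[]), or the movhi/movea/jmp
   sequence above under -mlong-calls.  The helper restores every register
   between the first and last one named, which is why gaps in the mask are
   harmless; at worst a few registers that were never saved get popped as
   well.  */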
2359 /* Construct a JARL instruction to a routine that will perform the equivalent
2360 of the RTL passed as a parameter. This RTL is a function prologue that
2361 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2362 some stack space as well. The code has already verified that the RTL
2363 matches these requirements. */
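/* For illustration (a sketch mirroring construct_restore_jr above): a
   prologue that saves r25 through r29 and allocates only the space
   needed for those saves would typically be replaced by
	jarl __save_r25_r29, r10
   or by the movhi/movea/jarl sequence emitted below when long calls
   are in use.  */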
2364 char *
2365 construct_save_jarl (rtx op)
2367 int count = XVECLEN (op, 0);
2368 int stack_bytes;
2369 unsigned long int mask;
2370 unsigned long int first;
2371 unsigned long int last;
2372 int i;
2373 static char buff [100]; /* XXX */
2375 if (count <= (TARGET_LONG_CALLS ? 3 : 2))
2377 error ("bogus JARL construction: %d", count);
2378 return NULL;
2381 /* Paranoia. */
2382 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2383 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2384 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
2385 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2387 /* Work out how many bytes to push onto the stack after storing the
2388 registers. */
2389 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2391 /* Each push will put 4 bytes onto the stack.... */
2392 stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;
2394 /* Make sure that the amount of extra stack space being acquired is 0. */
2395 if (stack_bytes != 0)
2397 error ("bad amount of stack space removal: %d", stack_bytes);
2398 return NULL;
2401 /* Now compute the bit mask of registers to push. */
2402 mask = 0;
2403 for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
2405 rtx vector_element = XVECEXP (op, 0, i);
2407 gcc_assert (GET_CODE (vector_element) == SET);
2408 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2409 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2410 SImode));
2412 mask |= 1 << REGNO (SET_SRC (vector_element));
2415 /* Scan for the first register to push. */
2416 for (first = 0; first < 32; first++)
2418 if (mask & (1 << first))
2419 break;
2422 gcc_assert (first < 32);
2424 /* Discover the last register to push. */
2425 if (mask & (1 << LINK_POINTER_REGNUM))
2427 last = LINK_POINTER_REGNUM;
2429 else
2431 gcc_assert (!stack_bytes);
2432 gcc_assert (mask & (1 << 29));
2434 last = 29;
2437 /* Note, it is possible to have gaps in the register mask.
2438 We ignore this here, and generate a JARL anyway. We will
2439 be pushing more registers than is strictly necessary, but
2440 it does save code space. */
2442 if (TARGET_LONG_CALLS)
2444 char name[40];
2446 if (first == last)
2447 sprintf (name, "__save_%s", reg_names [first]);
2448 else
2449 sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);
2451 if (TARGET_V850E3V5_UP)
2452 sprintf (buff, "mov hilo(%s), r11\n\tjarl [r11], r10", name);
2453 else
2454 sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2455 name, name);
2457 else
2459 if (first == last)
2460 sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
2461 else
2462 sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
2463 reg_names [last]);
2466 return buff;
2469 /* A version of asm_output_aligned_bss() that copes with the special
2470 data areas of the v850. */
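/* Usage sketch (illustrative): an uninitialized variable declared as
	static int counter __attribute__ ((zda));
   reaches this routine with the ZDA data area and is therefore placed
   in the .zbss section instead of the ordinary .bss section.  */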
2471 void
2472 v850_output_aligned_bss (FILE * file,
2473 tree decl,
2474 const char * name,
2475 unsigned HOST_WIDE_INT size,
2476 int align)
2478 switch (v850_get_data_area (decl))
2480 case DATA_AREA_ZDA:
2481 switch_to_section (zbss_section);
2482 break;
2484 case DATA_AREA_SDA:
2485 switch_to_section (sbss_section);
2486 break;
2488 case DATA_AREA_TDA:
2489 switch_to_section (tdata_section);
break;
2491 default:
2492 switch_to_section (bss_section);
2493 break;
2496 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2497 #ifdef ASM_DECLARE_OBJECT_NAME
2498 last_assemble_variable_decl = decl;
2499 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2500 #else
2501 /* Standard thing is just output label for the object. */
2502 ASM_OUTPUT_LABEL (file, name);
2503 #endif /* ASM_DECLARE_OBJECT_NAME */
2504 ASM_OUTPUT_SKIP (file, size ? size : 1);
2507 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2508 void
2509 v850_output_common (FILE * file,
2510 tree decl,
2511 const char * name,
2512 int size,
2513 int align)
2515 if (decl == NULL_TREE)
2517 fprintf (file, "%s", COMMON_ASM_OP);
2519 else
2521 switch (v850_get_data_area (decl))
2523 case DATA_AREA_ZDA:
2524 fprintf (file, "%s", ZCOMMON_ASM_OP);
2525 break;
2527 case DATA_AREA_SDA:
2528 fprintf (file, "%s", SCOMMON_ASM_OP);
2529 break;
2531 case DATA_AREA_TDA:
2532 fprintf (file, "%s", TCOMMON_ASM_OP);
2533 break;
2535 default:
2536 fprintf (file, "%s", COMMON_ASM_OP);
2537 break;
2541 assemble_name (file, name);
2542 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2545 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2546 void
2547 v850_output_local (FILE * file,
2548 tree decl,
2549 const char * name,
2550 int size,
2551 int align)
2553 fprintf (file, "%s", LOCAL_ASM_OP);
2554 assemble_name (file, name);
2555 fprintf (file, "\n");
2557 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2560 /* Add data area to the given declaration if a ghs data area pragma is
2561 currently in effect (#pragma ghs startXXX/endXXX). */
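/* An illustrative instance of the pragma form mentioned above:
	#pragma ghs startzda
	int fast_flag;	(picks up the ZDA data area here)
	#pragma ghs endzda
   Variables declared while such a pragma is active, and which carry no
   data area attribute of their own, are tagged by this hook.  */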
2562 static void
2563 v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
2565 if (data_area_stack
2566 && data_area_stack->data_area
2567 && current_function_decl == NULL_TREE
2568 && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
2569 && v850_get_data_area (decl) == DATA_AREA_NORMAL)
2570 v850_set_data_area (decl, data_area_stack->data_area);
2572 /* Initialize the default names of the v850 specific sections,
2573 if this has not been done before. */
2575 if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
2577 GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
2578 = ".sdata";
2580 GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
2581 = ".rosdata";
2583 GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
2584 = ".tdata";
2586 GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
2587 = ".zdata";
2589 GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
2590 = ".rozdata";
2593 if (current_function_decl == NULL_TREE
2594 && (TREE_CODE (decl) == VAR_DECL
2595 || TREE_CODE (decl) == CONST_DECL
2596 || TREE_CODE (decl) == FUNCTION_DECL)
2597 && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
2598 && !DECL_SECTION_NAME (decl))
2600 enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
2601 const char * chosen_section;
2603 if (TREE_CODE (decl) == FUNCTION_DECL)
2604 kind = GHS_SECTION_KIND_TEXT;
2605 else
2607 /* First choose a section kind based on the data area of the decl. */
2608 switch (v850_get_data_area (decl))
2610 default:
2611 gcc_unreachable ();
2613 case DATA_AREA_SDA:
2614 kind = ((TREE_READONLY (decl))
2615 ? GHS_SECTION_KIND_ROSDATA
2616 : GHS_SECTION_KIND_SDATA);
2617 break;
2619 case DATA_AREA_TDA:
2620 kind = GHS_SECTION_KIND_TDATA;
2621 break;
2623 case DATA_AREA_ZDA:
2624 kind = ((TREE_READONLY (decl))
2625 ? GHS_SECTION_KIND_ROZDATA
2626 : GHS_SECTION_KIND_ZDATA);
2627 break;
2629 case DATA_AREA_NORMAL: /* default data area */
2630 if (TREE_READONLY (decl))
2631 kind = GHS_SECTION_KIND_RODATA;
2632 else if (DECL_INITIAL (decl))
2633 kind = GHS_SECTION_KIND_DATA;
2634 else
2635 kind = GHS_SECTION_KIND_BSS;
2639 /* Now, if the section kind has been explicitly renamed,
2640 then attach a section attribute. */
2641 chosen_section = GHS_current_section_names [(int) kind];
2643 /* Otherwise, if this kind of section needs an explicit section
2644 attribute, then also attach one. */
2645 if (chosen_section == NULL)
2646 chosen_section = GHS_default_section_names [(int) kind];
2648 if (chosen_section)
2650 /* Only set the section name if specified by a pragma, because
2651 otherwise it will force those variables to get allocated storage
2652 in this module, rather than by the linker. */
2653 set_decl_section_name (decl, chosen_section);
2658 /* Construct a DISPOSE instruction that is the equivalent of
2659 the given RTX. We have already verified that this should
2660 be possible. */
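/* For illustration (a sketch): an epilogue that releases 8 bytes of
   local stack and restores r25 through r29, when the CALLT sequence is
   not being used, would be emitted as the single instruction
	dispose 2 {r25 - r29}, r31
   where the immediate counts 4-byte words of extra stack beyond the
   register pops.  */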
2662 char *
2663 construct_dispose_instruction (rtx op)
2665 int count = XVECLEN (op, 0);
2666 int stack_bytes;
2667 unsigned long int mask;
2668 int i;
2669 static char buff[ 100 ]; /* XXX */
2670 int use_callt = 0;
2672 if (count <= 2)
2674 error ("bogus DISPOSE construction: %d", count);
2675 return NULL;
2678 /* Work out how many bytes to pop off the
2679 stack before retrieving registers. */
2680 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2681 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2682 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2684 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2686 /* Each pop will remove 4 bytes from the stack.... */
2687 stack_bytes -= (count - 2) * 4;
2689 /* Make sure that the amount we are popping
2690 will fit into the DISPOSE instruction. */
2691 if (stack_bytes > 128)
2693 error ("too much stack space to dispose of: %d", stack_bytes);
2694 return NULL;
2697 /* Now compute the bit mask of registers to pop. */
2698 mask = 0;
2700 for (i = 2; i < count; i++)
2702 rtx vector_element = XVECEXP (op, 0, i);
2704 gcc_assert (GET_CODE (vector_element) == SET);
2705 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2706 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2707 SImode));
2709 if (REGNO (SET_DEST (vector_element)) == 2)
2710 use_callt = 1;
2711 else
2712 mask |= 1 << REGNO (SET_DEST (vector_element));
2715 if (! TARGET_DISABLE_CALLT
2716 && (use_callt || stack_bytes == 0))
2718 if (use_callt)
2720 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
2721 return buff;
2723 else
2725 for (i = 20; i < 32; i++)
2726 if (mask & (1 << i))
2727 break;
2729 if (i == 31)
2730 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2731 else
2732 sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2733 i, (mask & (1 << 31)) ? "31c" : "29");
2736 else
2738 static char regs [100]; /* XXX */
2739 int done_one;
2741 /* Generate the DISPOSE instruction. Note we could just issue the
2742 bit mask as a number as the assembler can cope with this, but for
2743 the sake of our readers we turn it into a textual description. */
2744 regs[0] = 0;
2745 done_one = 0;
2747 for (i = 20; i < 32; i++)
2749 if (mask & (1 << i))
2751 int first;
2753 if (done_one)
2754 strcat (regs, ", ");
2755 else
2756 done_one = 1;
2758 first = i;
2759 strcat (regs, reg_names[ first ]);
2761 for (i++; i < 32; i++)
2762 if ((mask & (1 << i)) == 0)
2763 break;
2765 if (i > first + 1)
2767 strcat (regs, " - ");
2768 strcat (regs, reg_names[ i - 1 ] );
2773 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2776 return buff;
2779 /* Construct a PREPARE instruction that is the equivalent of
2780 the given RTL. We have already verified that this should
2781 be possible. */
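/* For illustration, mirroring the DISPOSE case above: a prologue that
   stores r25 through r29 and allocates 8 further bytes of stack would
   be emitted as
	prepare {r25 - r29}, 2
   where, again, the immediate counts only the 4-byte words of stack
   acquired beyond the register stores.  */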
2783 char *
2784 construct_prepare_instruction (rtx op)
2786 int count;
2787 int stack_bytes;
2788 unsigned long int mask;
2789 int i;
2790 static char buff[ 100 ]; /* XXX */
2791 int use_callt = 0;
2793 if (XVECLEN (op, 0) <= 1)
2795 error ("bogus PREPARE construction: %d", XVECLEN (op, 0));
2796 return NULL;
2799 /* Work out how many bytes to push onto
2800 the stack after storing the registers. */
2801 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2802 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2803 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2805 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2808 /* Make sure that the amount of stack space being acquired
2809 will fit into the PREPARE instruction. */
2810 if (stack_bytes < -128)
2812 error ("too much stack space to prepare: %d", stack_bytes);
2813 return NULL;
2816 /* Now compute the bit mask of registers to push. */
2817 count = 0;
2818 mask = 0;
2819 for (i = 1; i < XVECLEN (op, 0); i++)
2821 rtx vector_element = XVECEXP (op, 0, i);
2823 if (GET_CODE (vector_element) == CLOBBER)
2824 continue;
2826 gcc_assert (GET_CODE (vector_element) == SET);
2827 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2828 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2829 SImode));
2831 if (REGNO (SET_SRC (vector_element)) == 2)
2832 use_callt = 1;
2833 else
2834 mask |= 1 << REGNO (SET_SRC (vector_element));
2835 count++;
2838 stack_bytes += count * 4;
2840 if ((! TARGET_DISABLE_CALLT)
2841 && (use_callt || stack_bytes == 0))
2843 if (use_callt)
2845 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2846 return buff;
2849 for (i = 20; i < 32; i++)
2850 if (mask & (1 << i))
2851 break;
2853 if (i == 31)
2854 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2855 else
2856 sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2857 i, (mask & (1 << 31)) ? "31c" : "29");
2859 else
2861 static char regs [100]; /* XXX */
2862 int done_one;
2865 /* Generate the PREPARE instruction. Note we could just issue the
2866 bit mask as a number as the assembler can cope with this, but for
2867 the sake of our readers we turn it into a textual description. */
2868 regs[0] = 0;
2869 done_one = 0;
2871 for (i = 20; i < 32; i++)
2873 if (mask & (1 << i))
2875 int first;
2877 if (done_one)
2878 strcat (regs, ", ");
2879 else
2880 done_one = 1;
2882 first = i;
2883 strcat (regs, reg_names[ first ]);
2885 for (i++; i < 32; i++)
2886 if ((mask & (1 << i)) == 0)
2887 break;
2889 if (i > first + 1)
2891 strcat (regs, " - ");
2892 strcat (regs, reg_names[ i - 1 ] );
2897 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2900 return buff;
2903 /* Return an RTX indicating where the return address to the
2904 calling function can be found. */
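/* This is the routine behind __builtin_return_address on this target
   (presumably wired up via the port's RETURN_ADDR_RTX macro); e.g.
   (illustrative)
	void *caller = __builtin_return_address (0);
   yields the value the link pointer (r31) held on entry to the current
   function, while any non-zero depth simply yields 0 here.  */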
2906 rtx
2907 v850_return_addr (int count)
2909 if (count != 0)
2910 return const0_rtx;
2912 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2915 /* Implement TARGET_ASM_INIT_SECTIONS. */
2917 static void
2918 v850_asm_init_sections (void)
2920 rosdata_section
2921 = get_unnamed_section (0, output_section_asm_op,
2922 "\t.section .rosdata,\"a\"");
2924 rozdata_section
2925 = get_unnamed_section (0, output_section_asm_op,
2926 "\t.section .rozdata,\"a\"");
2928 tdata_section
2929 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2930 "\t.section .tdata,\"aw\"");
2932 zdata_section
2933 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2934 "\t.section .zdata,\"aw\"");
2936 zbss_section
2937 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
2938 output_section_asm_op,
2939 "\t.section .zbss,\"aw\"");
2942 static section *
2943 v850_select_section (tree exp,
2944 int reloc ATTRIBUTE_UNUSED,
2945 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2947 if (TREE_CODE (exp) == VAR_DECL)
2949 int is_const;
2950 if (!TREE_READONLY (exp)
2951 || TREE_SIDE_EFFECTS (exp)
2952 || !DECL_INITIAL (exp)
2953 || (DECL_INITIAL (exp) != error_mark_node
2954 && !TREE_CONSTANT (DECL_INITIAL (exp))))
2955 is_const = FALSE;
2956 else
2957 is_const = TRUE;
2959 switch (v850_get_data_area (exp))
2961 case DATA_AREA_ZDA:
2962 return is_const ? rozdata_section : zdata_section;
2964 case DATA_AREA_TDA:
2965 return tdata_section;
2967 case DATA_AREA_SDA:
2968 return is_const ? rosdata_section : sdata_section;
2970 default:
2971 return is_const ? readonly_data_section : data_section;
2974 return readonly_data_section;
2977 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
2979 static bool
2980 v850_function_value_regno_p (const unsigned int regno)
2982 return (regno == RV_REGNUM);
2985 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2987 static bool
2988 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2990 /* Return values more than 8 bytes in length are returned in memory. */
2991 return int_size_in_bytes (type) > 8
2992 || TYPE_MODE (type) == BLKmode
2993 /* With the RH850 ABI return all aggregates in memory. */
2994 || ((! TARGET_GCC_ABI) && AGGREGATE_TYPE_P (type))
2998 /* Worker function for TARGET_FUNCTION_VALUE. */
3000 static rtx
3001 v850_function_value (const_tree valtype,
3002 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
3003 bool outgoing ATTRIBUTE_UNUSED)
3005 return gen_rtx_REG (TYPE_MODE (valtype), RV_REGNUM);
3008 /* Implement TARGET_LIBCALL_VALUE. */
3010 static rtx
3011 v850_libcall_value (machine_mode mode,
3012 const_rtx func ATTRIBUTE_UNUSED)
3014 return gen_rtx_REG (mode, RV_REGNUM);
3018 /* Worker function for TARGET_CAN_ELIMINATE. */
3020 static bool
3021 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
3023 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
3026 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
3028 If TARGET_APP_REGS is not defined then add r2 and r5 to
3029 the pool of fixed registers. See PR 14505. */
3031 static void
3032 v850_conditional_register_usage (void)
3034 if (TARGET_APP_REGS)
3036 fixed_regs[2] = 0; call_used_regs[2] = 0;
3037 fixed_regs[5] = 0; call_used_regs[5] = 1;
3041 /* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE. */
3043 static void
3044 v850_asm_trampoline_template (FILE *f)
3046 fprintf (f, "\tjarl .+4,r12\n");
3047 fprintf (f, "\tld.w 12[r12],r20\n");
3048 fprintf (f, "\tld.w 16[r12],r12\n");
3049 fprintf (f, "\tjmp [r12]\n");
3050 fprintf (f, "\tnop\n");
3051 fprintf (f, "\t.long 0\n");
3052 fprintf (f, "\t.long 0\n");
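/* Layout note for the template above and v850_trampoline_init below:
   the jarl leaves the address of the second instruction (trampoline
   + 4) in r12, so the ld.w 12[r12] and ld.w 16[r12] loads fetch the
   two .long slots at offsets 16 and 20.  v850_trampoline_init fills
   exactly those slots with the static chain value and the address of
   the target function.  */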
3055 /* Worker function for TARGET_TRAMPOLINE_INIT. */
3057 static void
3058 v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
3060 rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);
3062 emit_block_move (m_tramp, assemble_trampoline_template (),
3063 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3065 mem = adjust_address (m_tramp, SImode, 16);
3066 emit_move_insn (mem, chain_value);
3067 mem = adjust_address (m_tramp, SImode, 20);
3068 emit_move_insn (mem, fnaddr);
3071 static int
3072 v850_issue_rate (void)
3074 return (TARGET_V850E2_UP ? 2 : 1);
3077 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3079 static bool
3080 v850_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
3082 return (GET_CODE (x) == CONST_DOUBLE
3083 || !(GET_CODE (x) == CONST
3084 && GET_CODE (XEXP (x, 0)) == PLUS
3085 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
3086 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3087 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x, 0), 1)))));
3090 /* Helper function for `v850_legitimate_address_p'. */
3092 static bool
3093 v850_reg_ok_for_base_p (const_rtx reg, bool strict_p)
3095 if (strict_p)
3097 return REGNO_OK_FOR_BASE_P (REGNO (reg));
3098 } else {
3099 return true;
3103 /* Accept either REG or SUBREG where a register is valid. */
3105 static bool
3106 v850_rtx_ok_for_base_p (const_rtx x, bool strict_p)
3108 return ((REG_P (x) && v850_reg_ok_for_base_p (x, strict_p))
3109 || (SUBREG_P (x) && REG_P (SUBREG_REG (x))
3110 && v850_reg_ok_for_base_p (SUBREG_REG (x), strict_p)));
3113 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
3115 static bool
3116 v850_legitimate_address_p (machine_mode mode, rtx x, bool strict_p,
3117 addr_space_t as ATTRIBUTE_UNUSED)
3119 gcc_assert (ADDR_SPACE_GENERIC_P (as));
3121 if (v850_rtx_ok_for_base_p (x, strict_p))
3122 return true;
3123 if (CONSTANT_ADDRESS_P (x)
3124 && (mode == QImode || INTVAL (x) % 2 == 0)
3125 && (GET_MODE_SIZE (mode) <= 4 || INTVAL (x) % 4 == 0))
3126 return true;
3127 if (GET_CODE (x) == LO_SUM
3128 && REG_P (XEXP (x, 0))
3129 && v850_reg_ok_for_base_p (XEXP (x, 0), strict_p)
3130 && CONSTANT_P (XEXP (x, 1))
3131 && (!CONST_INT_P (XEXP (x, 1))
3132 || ((mode == QImode || INTVAL (XEXP (x, 1)) % 2 == 0)
3133 && constraint_satisfied_p (XEXP (x, 1), CONSTRAINT_K)))
3134 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (word_mode))
3135 return true;
3136 if (special_symbolref_operand (x, mode)
3137 && (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (word_mode)))
3138 return true;
3139 if (GET_CODE (x) == PLUS
3140 && v850_rtx_ok_for_base_p (XEXP (x, 0), strict_p)
3141 && constraint_satisfied_p (XEXP (x,1), CONSTRAINT_K)
3142 && ((mode == QImode || INTVAL (XEXP (x, 1)) % 2 == 0)
3143 && CONST_OK_FOR_K (INTVAL (XEXP (x, 1))
3144 + (GET_MODE_NUNITS (mode) * UNITS_PER_WORD))))
3145 return true;
3147 return false;
3150 static int
3151 v850_memory_move_cost (machine_mode mode,
3152 reg_class_t reg_class ATTRIBUTE_UNUSED,
3153 bool in)
3155 switch (GET_MODE_SIZE (mode))
3157 case 0:
3158 return in ? 24 : 8;
3159 case 1:
3160 case 2:
3161 case 3:
3162 case 4:
3163 return in ? 6 : 2;
3164 default:
3165 return (GET_MODE_SIZE (mode) / 2) * (in ? 3 : 1);
3169 int
3170 v850_adjust_insn_length (rtx_insn *insn, int length)
3172 if (TARGET_V850E3V5_UP)
3174 if (CALL_P (insn))
3176 if (TARGET_LONG_CALLS)
3178 /* call_internal_long, call_value_internal_long. */
3179 if (length == 8)
3180 length = 4;
3181 if (length == 16)
3182 length = 10;
3184 else
3186 /* call_internal_short, call_value_internal_short. */
3187 if (length == 8)
3188 length = 4;
3192 return length;
3195 /* V850 specific attributes. */
3197 static const struct attribute_spec v850_attribute_table[] =
3199 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
3200 affects_type_identity } */
3201 { "interrupt_handler", 0, 0, true, false, false,
3202 v850_handle_interrupt_attribute, false },
3203 { "interrupt", 0, 0, true, false, false,
3204 v850_handle_interrupt_attribute, false },
3205 { "sda", 0, 0, true, false, false,
3206 v850_handle_data_area_attribute, false },
3207 { "tda", 0, 0, true, false, false,
3208 v850_handle_data_area_attribute, false },
3209 { "zda", 0, 0, true, false, false,
3210 v850_handle_data_area_attribute, false },
3211 { NULL, 0, 0, false, false, false, NULL, false }
3214 static void
3215 v850_option_override (void)
3217 if (flag_exceptions || flag_non_call_exceptions)
3218 flag_omit_frame_pointer = 0;
3220 /* The RH850 ABI does not (currently) support the use of the CALLT instruction. */
3221 if (! TARGET_GCC_ABI)
3222 target_flags |= MASK_DISABLE_CALLT;
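/* Note on v850_gen_movdi below: when both operands are register pairs
   the two single-word moves must be ordered so that the source half
   which is still needed is not overwritten first.  Copying the low
   word first is safe when the destination pair starts one register
   below the source (the overlapping source register is read before it
   is clobbered); otherwise the high word is copied first.  */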
3225 const char *
3226 v850_gen_movdi (rtx * operands)
3228 if (REG_P (operands[0]))
3230 if (REG_P (operands[1]))
3232 if (REGNO (operands[0]) == (REGNO (operands[1]) - 1))
3233 return "mov %1, %0; mov %R1, %R0";
3235 return "mov %R1, %R0; mov %1, %0";
3238 if (MEM_P (operands[1]))
3240 if (REGNO (operands[0]) & 1)
3241 /* Use two load word instructions to synthesise a load double. */
3242 return "ld.w %1, %0 ; ld.w %R1, %R0" ;
3244 return "ld.dw %1, %0";
3247 return "mov %1, %0; mov %R1, %R0";
3250 gcc_assert (REG_P (operands[1]));
3252 if (REGNO (operands[1]) & 1)
3253 /* Use two store word instructions to synthesise a store double. */
3254 return "st.w %1, %0 ; st.w %R1, %R0 ";
3256 return "st.dw %1, %0";
3259 /* Initialize the GCC target structure. */
3261 #undef TARGET_OPTION_OVERRIDE
3262 #define TARGET_OPTION_OVERRIDE v850_option_override
3264 #undef TARGET_MEMORY_MOVE_COST
3265 #define TARGET_MEMORY_MOVE_COST v850_memory_move_cost
3267 #undef TARGET_ASM_ALIGNED_HI_OP
3268 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
3270 #undef TARGET_PRINT_OPERAND
3271 #define TARGET_PRINT_OPERAND v850_print_operand
3272 #undef TARGET_PRINT_OPERAND_ADDRESS
3273 #define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
3274 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
3275 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p
3277 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
3278 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra
3280 #undef TARGET_ATTRIBUTE_TABLE
3281 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
3283 #undef TARGET_INSERT_ATTRIBUTES
3284 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
3286 #undef TARGET_ASM_SELECT_SECTION
3287 #define TARGET_ASM_SELECT_SECTION v850_select_section
3289 /* The assembler supports switchable .bss sections, but
3290 v850_select_section doesn't yet make use of them. */
3291 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
3292 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
3294 #undef TARGET_ENCODE_SECTION_INFO
3295 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
3297 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
3298 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
3300 #undef TARGET_RTX_COSTS
3301 #define TARGET_RTX_COSTS v850_rtx_costs
3303 #undef TARGET_ADDRESS_COST
3304 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
3306 #undef TARGET_MACHINE_DEPENDENT_REORG
3307 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
3309 #undef TARGET_SCHED_ISSUE_RATE
3310 #define TARGET_SCHED_ISSUE_RATE v850_issue_rate
3312 #undef TARGET_FUNCTION_VALUE_REGNO_P
3313 #define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
3314 #undef TARGET_FUNCTION_VALUE
3315 #define TARGET_FUNCTION_VALUE v850_function_value
3316 #undef TARGET_LIBCALL_VALUE
3317 #define TARGET_LIBCALL_VALUE v850_libcall_value
3319 #undef TARGET_PROMOTE_PROTOTYPES
3320 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3322 #undef TARGET_RETURN_IN_MEMORY
3323 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
3325 #undef TARGET_PASS_BY_REFERENCE
3326 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
3328 #undef TARGET_CALLEE_COPIES
3329 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
3331 #undef TARGET_ARG_PARTIAL_BYTES
3332 #define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes
3334 #undef TARGET_FUNCTION_ARG
3335 #define TARGET_FUNCTION_ARG v850_function_arg
3337 #undef TARGET_FUNCTION_ARG_ADVANCE
3338 #define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance
3340 #undef TARGET_CAN_ELIMINATE
3341 #define TARGET_CAN_ELIMINATE v850_can_eliminate
3343 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3344 #define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage
3346 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3347 #define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
3348 #undef TARGET_TRAMPOLINE_INIT
3349 #define TARGET_TRAMPOLINE_INIT v850_trampoline_init
3351 #undef TARGET_LEGITIMATE_CONSTANT_P
3352 #define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p
3354 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
3355 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P v850_legitimate_address_p
3357 #undef TARGET_CAN_USE_DOLOOP_P
3358 #define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost
3360 struct gcc_target targetm = TARGET_INITIALIZER;
3362 #include "gt-v850.h"