PR target/56858
[official-gcc.git] / gcc / config / rx / rx.c
blob3fc2847ac60cbf2e416df1b92a0ba7f1d7f3a3b5
1 /* Subroutines used for code generation on Renesas RX processors.
2 Copyright (C) 2008-2014 Free Software Foundation, Inc.
3 Contributed by Red Hat.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* To Do:
23 * Re-enable memory-to-memory copies and fix up reload. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "tree.h"
30 #include "varasm.h"
31 #include "stor-layout.h"
32 #include "calls.h"
33 #include "rtl.h"
34 #include "regs.h"
35 #include "hard-reg-set.h"
36 #include "insn-config.h"
37 #include "conditions.h"
38 #include "output.h"
39 #include "insn-attr.h"
40 #include "flags.h"
41 #include "function.h"
42 #include "expr.h"
43 #include "optabs.h"
44 #include "libfuncs.h"
45 #include "recog.h"
46 #include "diagnostic-core.h"
47 #include "toplev.h"
48 #include "reload.h"
49 #include "df.h"
50 #include "ggc.h"
51 #include "tm_p.h"
52 #include "debug.h"
53 #include "target.h"
54 #include "target-def.h"
55 #include "langhooks.h"
56 #include "opts.h"
57 #include "cgraph.h"
58 #include "builtins.h"
60 static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
61 static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
62 static unsigned int rx_num_interrupt_regs;
64 static unsigned int
65 rx_gp_base_regnum (void)
67 if (rx_gp_base_regnum_val == INVALID_REGNUM)
68 gcc_unreachable ();
69 return rx_gp_base_regnum_val;
72 static unsigned int
73 rx_pid_base_regnum (void)
75 if (rx_pid_base_regnum_val == INVALID_REGNUM)
76 gcc_unreachable ();
77 return rx_pid_base_regnum_val;
80 /* Find a SYMBOL_REF in a "standard" MEM address and return its decl. */
82 static tree
83 rx_decl_for_addr (rtx op)
85 if (GET_CODE (op) == MEM)
86 op = XEXP (op, 0);
87 if (GET_CODE (op) == CONST)
88 op = XEXP (op, 0);
89 while (GET_CODE (op) == PLUS)
90 op = XEXP (op, 0);
91 if (GET_CODE (op) == SYMBOL_REF)
92 return SYMBOL_REF_DECL (op);
93 return NULL_TREE;
96 static void rx_print_operand (FILE *, rtx, int);
98 #define CC_FLAG_S (1 << 0)
99 #define CC_FLAG_Z (1 << 1)
100 #define CC_FLAG_O (1 << 2)
101 #define CC_FLAG_C (1 << 3)
102 #define CC_FLAG_FP (1 << 4) /* Fake, to differentiate CC_Fmode. */
104 static unsigned int flags_from_mode (enum machine_mode mode);
105 static unsigned int flags_from_code (enum rtx_code code);
/* Classification of an operand with respect to the PID data area.  */

enum pid_type
{
  PID_NOT_PID = 0,	/* The object is not in the PID data area.  */
  PID_ENCODED,		/* The object is in the PID data area.  */
  PID_UNENCODED		/* The object will be placed in the PID data
			   area, but it has not been placed there yet.  */
};
116 static enum pid_type
117 rx_pid_data_operand (rtx op)
119 tree op_decl;
121 if (!TARGET_PID)
122 return PID_NOT_PID;
124 if (GET_CODE (op) == PLUS
125 && GET_CODE (XEXP (op, 0)) == REG
126 && GET_CODE (XEXP (op, 1)) == CONST
127 && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
128 return PID_ENCODED;
130 op_decl = rx_decl_for_addr (op);
132 if (op_decl)
134 if (TREE_READONLY (op_decl))
135 return PID_UNENCODED;
137 else
139 /* Sigh, some special cases. */
140 if (GET_CODE (op) == SYMBOL_REF
141 || GET_CODE (op) == LABEL_REF)
142 return PID_UNENCODED;
145 return PID_NOT_PID;
148 static rtx
149 rx_legitimize_address (rtx x,
150 rtx oldx ATTRIBUTE_UNUSED,
151 enum machine_mode mode ATTRIBUTE_UNUSED)
153 if (rx_pid_data_operand (x) == PID_UNENCODED)
155 rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
156 return rv;
159 if (GET_CODE (x) == PLUS
160 && GET_CODE (XEXP (x, 0)) == PLUS
161 && REG_P (XEXP (XEXP (x, 0), 0))
162 && REG_P (XEXP (x, 1)))
163 return force_reg (SImode, x);
165 return x;
168 /* Return true if OP is a reference to an object in a small data area. */
170 static bool
171 rx_small_data_operand (rtx op)
173 if (rx_small_data_limit == 0)
174 return false;
176 if (GET_CODE (op) == SYMBOL_REF)
177 return SYMBOL_REF_SMALL_P (op);
179 return false;
182 static bool
183 rx_is_legitimate_address (enum machine_mode mode, rtx x,
184 bool strict ATTRIBUTE_UNUSED)
186 if (RTX_OK_FOR_BASE (x, strict))
187 /* Register Indirect. */
188 return true;
190 if ((GET_MODE_SIZE (mode) == 4
191 || GET_MODE_SIZE (mode) == 2
192 || GET_MODE_SIZE (mode) == 1)
193 && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
194 /* Pre-decrement Register Indirect or
195 Post-increment Register Indirect. */
196 return RTX_OK_FOR_BASE (XEXP (x, 0), strict);
198 switch (rx_pid_data_operand (x))
200 case PID_UNENCODED:
201 return false;
202 case PID_ENCODED:
203 return true;
204 default:
205 break;
208 if (GET_CODE (x) == PLUS)
210 rtx arg1 = XEXP (x, 0);
211 rtx arg2 = XEXP (x, 1);
212 rtx index = NULL_RTX;
214 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
215 index = arg2;
216 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
217 index = arg1;
218 else
219 return false;
221 switch (GET_CODE (index))
223 case CONST_INT:
225 /* Register Relative: REG + INT.
226 Only positive, mode-aligned, mode-sized
227 displacements are allowed. */
228 HOST_WIDE_INT val = INTVAL (index);
229 int factor;
231 if (val < 0)
232 return false;
234 switch (GET_MODE_SIZE (mode))
236 default:
237 case 4: factor = 4; break;
238 case 2: factor = 2; break;
239 case 1: factor = 1; break;
242 if (val > (65535 * factor))
243 return false;
244 return (val % factor) == 0;
247 case REG:
248 /* Unscaled Indexed Register Indirect: REG + REG
249 Size has to be "QI", REG has to be valid. */
250 return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);
252 case MULT:
254 /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
255 Factor has to equal the mode size, REG has to be valid. */
256 rtx factor;
258 factor = XEXP (index, 1);
259 index = XEXP (index, 0);
261 return REG_P (index)
262 && RTX_OK_FOR_BASE (index, strict)
263 && CONST_INT_P (factor)
264 && GET_MODE_SIZE (mode) == INTVAL (factor);
267 default:
268 return false;
272 /* Small data area accesses turn into register relative offsets. */
273 return rx_small_data_operand (x);
276 /* Returns TRUE for simple memory addreses, ie ones
277 that do not involve register indirect addressing
278 or pre/post increment/decrement. */
280 bool
281 rx_is_restricted_memory_address (rtx mem, enum machine_mode mode)
283 if (! rx_is_legitimate_address
284 (mode, mem, reload_in_progress || reload_completed))
285 return false;
287 switch (GET_CODE (mem))
289 case REG:
290 /* Simple memory addresses are OK. */
291 return true;
293 case PRE_DEC:
294 case POST_INC:
295 return false;
297 case PLUS:
299 rtx base, index;
301 /* Only allow REG+INT addressing. */
302 base = XEXP (mem, 0);
303 index = XEXP (mem, 1);
305 if (! RX_REG_P (base) || ! CONST_INT_P (index))
306 return false;
308 return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
311 case SYMBOL_REF:
312 /* Can happen when small data is being supported.
313 Assume that it will be resolved into GP+INT. */
314 return true;
316 default:
317 gcc_unreachable ();
321 /* Implement TARGET_MODE_DEPENDENT_ADDRESS_P. */
323 static bool
324 rx_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
326 if (GET_CODE (addr) == CONST)
327 addr = XEXP (addr, 0);
329 switch (GET_CODE (addr))
331 /* --REG and REG++ only work in SImode. */
332 case PRE_DEC:
333 case POST_INC:
334 return true;
336 case MINUS:
337 case PLUS:
338 if (! REG_P (XEXP (addr, 0)))
339 return true;
341 addr = XEXP (addr, 1);
343 switch (GET_CODE (addr))
345 case REG:
346 /* REG+REG only works in SImode. */
347 return true;
349 case CONST_INT:
350 /* REG+INT is only mode independent if INT is a
351 multiple of 4, positive and will fit into 16-bits. */
352 if (((INTVAL (addr) & 3) == 0)
353 && IN_RANGE (INTVAL (addr), 4, 0xfffc))
354 return false;
355 return true;
357 case SYMBOL_REF:
358 case LABEL_REF:
359 return true;
361 case MULT:
362 gcc_assert (REG_P (XEXP (addr, 0)));
363 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
364 /* REG+REG*SCALE is always mode dependent. */
365 return true;
367 default:
368 /* Not recognized, so treat as mode dependent. */
369 return true;
372 case CONST_INT:
373 case SYMBOL_REF:
374 case LABEL_REF:
375 case REG:
376 /* These are all mode independent. */
377 return false;
379 default:
380 /* Everything else is unrecognized,
381 so treat as mode dependent. */
382 return true;
386 /* A C compound statement to output to stdio stream FILE the
387 assembler syntax for an instruction operand that is a memory
388 reference whose address is ADDR. */
390 static void
391 rx_print_operand_address (FILE * file, rtx addr)
393 switch (GET_CODE (addr))
395 case REG:
396 fprintf (file, "[");
397 rx_print_operand (file, addr, 0);
398 fprintf (file, "]");
399 break;
401 case PRE_DEC:
402 fprintf (file, "[-");
403 rx_print_operand (file, XEXP (addr, 0), 0);
404 fprintf (file, "]");
405 break;
407 case POST_INC:
408 fprintf (file, "[");
409 rx_print_operand (file, XEXP (addr, 0), 0);
410 fprintf (file, "+]");
411 break;
413 case PLUS:
415 rtx arg1 = XEXP (addr, 0);
416 rtx arg2 = XEXP (addr, 1);
417 rtx base, index;
419 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
420 base = arg1, index = arg2;
421 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
422 base = arg2, index = arg1;
423 else
425 rx_print_operand (file, arg1, 0);
426 fprintf (file, " + ");
427 rx_print_operand (file, arg2, 0);
428 break;
431 if (REG_P (index) || GET_CODE (index) == MULT)
433 fprintf (file, "[");
434 rx_print_operand (file, index, 'A');
435 fprintf (file, ",");
437 else /* GET_CODE (index) == CONST_INT */
439 rx_print_operand (file, index, 'A');
440 fprintf (file, "[");
442 rx_print_operand (file, base, 0);
443 fprintf (file, "]");
444 break;
447 case CONST:
448 if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
450 addr = XEXP (addr, 0);
451 gcc_assert (XINT (addr, 1) == UNSPEC_CONST);
453 /* FIXME: Putting this case label here is an appalling abuse of the C language. */
454 case UNSPEC:
455 addr = XVECEXP (addr, 0, 0);
456 gcc_assert (CONST_INT_P (addr));
458 /* Fall through. */
459 case LABEL_REF:
460 case SYMBOL_REF:
461 fprintf (file, "#");
462 /* Fall through. */
463 default:
464 output_addr_const (file, addr);
465 break;
469 static void
470 rx_print_integer (FILE * file, HOST_WIDE_INT val)
472 if (IN_RANGE (val, -64, 64))
473 fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
474 else
475 fprintf (file,
476 TARGET_AS100_SYNTAX
477 ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
478 val);
481 static bool
482 rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
484 const char * op = integer_asm_op (size, is_aligned);
486 if (! CONST_INT_P (x))
487 return default_assemble_integer (x, size, is_aligned);
489 if (op == NULL)
490 return false;
491 fputs (op, asm_out_file);
493 rx_print_integer (asm_out_file, INTVAL (x));
494 fputc ('\n', asm_out_file);
495 return true;
499 /* Handles the insertion of a single operand into the assembler output.
500 The %<letter> directives supported are:
502 %A Print an operand without a leading # character.
503 %B Print an integer comparison name.
504 %C Print a control register name.
505 %F Print a condition code flag name.
506 %G Register used for small-data-area addressing
507 %H Print high part of a DImode register, integer or address.
508 %L Print low part of a DImode register, integer or address.
509 %N Print the negation of the immediate value.
510 %P Register used for PID addressing
511 %Q If the operand is a MEM, then correctly generate
512 register indirect or register relative addressing.
513 %R Like %Q but for zero-extending loads. */
515 static void
516 rx_print_operand (FILE * file, rtx op, int letter)
518 bool unsigned_load = false;
519 bool print_hash = true;
521 if (letter == 'A'
522 && ((GET_CODE (op) == CONST
523 && GET_CODE (XEXP (op, 0)) == UNSPEC)
524 || GET_CODE (op) == UNSPEC))
526 print_hash = false;
527 letter = 0;
530 switch (letter)
532 case 'A':
533 /* Print an operand without a leading #. */
534 if (MEM_P (op))
535 op = XEXP (op, 0);
537 switch (GET_CODE (op))
539 case LABEL_REF:
540 case SYMBOL_REF:
541 output_addr_const (file, op);
542 break;
543 case CONST_INT:
544 fprintf (file, "%ld", (long) INTVAL (op));
545 break;
546 default:
547 rx_print_operand (file, op, 0);
548 break;
550 break;
552 case 'B':
554 enum rtx_code code = GET_CODE (op);
555 enum machine_mode mode = GET_MODE (XEXP (op, 0));
556 const char *ret;
558 if (mode == CC_Fmode)
560 /* C flag is undefined, and O flag carries unordered. None of the
561 branch combinations that include O use it helpfully. */
562 switch (code)
564 case ORDERED:
565 ret = "no";
566 break;
567 case UNORDERED:
568 ret = "o";
569 break;
570 case LT:
571 ret = "n";
572 break;
573 case GE:
574 ret = "pz";
575 break;
576 case EQ:
577 ret = "eq";
578 break;
579 case NE:
580 ret = "ne";
581 break;
582 default:
583 gcc_unreachable ();
586 else
588 unsigned int flags = flags_from_mode (mode);
590 switch (code)
592 case LT:
593 ret = (flags & CC_FLAG_O ? "lt" : "n");
594 break;
595 case GE:
596 ret = (flags & CC_FLAG_O ? "ge" : "pz");
597 break;
598 case GT:
599 ret = "gt";
600 break;
601 case LE:
602 ret = "le";
603 break;
604 case GEU:
605 ret = "geu";
606 break;
607 case LTU:
608 ret = "ltu";
609 break;
610 case GTU:
611 ret = "gtu";
612 break;
613 case LEU:
614 ret = "leu";
615 break;
616 case EQ:
617 ret = "eq";
618 break;
619 case NE:
620 ret = "ne";
621 break;
622 default:
623 gcc_unreachable ();
625 gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
627 fputs (ret, file);
628 break;
631 case 'C':
632 gcc_assert (CONST_INT_P (op));
633 switch (INTVAL (op))
635 case 0: fprintf (file, "psw"); break;
636 case 2: fprintf (file, "usp"); break;
637 case 3: fprintf (file, "fpsw"); break;
638 case 4: fprintf (file, "cpen"); break;
639 case 8: fprintf (file, "bpsw"); break;
640 case 9: fprintf (file, "bpc"); break;
641 case 0xa: fprintf (file, "isp"); break;
642 case 0xb: fprintf (file, "fintv"); break;
643 case 0xc: fprintf (file, "intb"); break;
644 default:
645 warning (0, "unrecognized control register number: %d - using 'psw'",
646 (int) INTVAL (op));
647 fprintf (file, "psw");
648 break;
650 break;
652 case 'F':
653 gcc_assert (CONST_INT_P (op));
654 switch (INTVAL (op))
656 case 0: case 'c': case 'C': fprintf (file, "C"); break;
657 case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
658 case 2: case 's': case 'S': fprintf (file, "S"); break;
659 case 3: case 'o': case 'O': fprintf (file, "O"); break;
660 case 8: case 'i': case 'I': fprintf (file, "I"); break;
661 case 9: case 'u': case 'U': fprintf (file, "U"); break;
662 default:
663 gcc_unreachable ();
665 break;
667 case 'G':
668 fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
669 break;
671 case 'H':
672 switch (GET_CODE (op))
674 case REG:
675 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
676 break;
677 case CONST_INT:
679 HOST_WIDE_INT v = INTVAL (op);
681 fprintf (file, "#");
682 /* Trickery to avoid problems with shifting 32 bits at a time. */
683 v = v >> 16;
684 v = v >> 16;
685 rx_print_integer (file, v);
686 break;
688 case CONST_DOUBLE:
689 fprintf (file, "#");
690 rx_print_integer (file, CONST_DOUBLE_HIGH (op));
691 break;
692 case MEM:
693 if (! WORDS_BIG_ENDIAN)
694 op = adjust_address (op, SImode, 4);
695 output_address (XEXP (op, 0));
696 break;
697 default:
698 gcc_unreachable ();
700 break;
702 case 'L':
703 switch (GET_CODE (op))
705 case REG:
706 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
707 break;
708 case CONST_INT:
709 fprintf (file, "#");
710 rx_print_integer (file, INTVAL (op) & 0xffffffff);
711 break;
712 case CONST_DOUBLE:
713 fprintf (file, "#");
714 rx_print_integer (file, CONST_DOUBLE_LOW (op));
715 break;
716 case MEM:
717 if (WORDS_BIG_ENDIAN)
718 op = adjust_address (op, SImode, 4);
719 output_address (XEXP (op, 0));
720 break;
721 default:
722 gcc_unreachable ();
724 break;
726 case 'N':
727 gcc_assert (CONST_INT_P (op));
728 fprintf (file, "#");
729 rx_print_integer (file, - INTVAL (op));
730 break;
732 case 'P':
733 fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
734 break;
736 case 'R':
737 gcc_assert (GET_MODE_SIZE (GET_MODE (op)) < 4);
738 unsigned_load = true;
739 /* Fall through. */
740 case 'Q':
741 if (MEM_P (op))
743 HOST_WIDE_INT offset;
744 rtx mem = op;
746 op = XEXP (op, 0);
748 if (REG_P (op))
749 offset = 0;
750 else if (GET_CODE (op) == PLUS)
752 rtx displacement;
754 if (REG_P (XEXP (op, 0)))
756 displacement = XEXP (op, 1);
757 op = XEXP (op, 0);
759 else
761 displacement = XEXP (op, 0);
762 op = XEXP (op, 1);
763 gcc_assert (REG_P (op));
766 gcc_assert (CONST_INT_P (displacement));
767 offset = INTVAL (displacement);
768 gcc_assert (offset >= 0);
770 fprintf (file, "%ld", offset);
772 else
773 gcc_unreachable ();
775 fprintf (file, "[");
776 rx_print_operand (file, op, 0);
777 fprintf (file, "].");
779 switch (GET_MODE_SIZE (GET_MODE (mem)))
781 case 1:
782 gcc_assert (offset <= 65535 * 1);
783 fprintf (file, unsigned_load ? "UB" : "B");
784 break;
785 case 2:
786 gcc_assert (offset % 2 == 0);
787 gcc_assert (offset <= 65535 * 2);
788 fprintf (file, unsigned_load ? "UW" : "W");
789 break;
790 case 4:
791 gcc_assert (offset % 4 == 0);
792 gcc_assert (offset <= 65535 * 4);
793 fprintf (file, "L");
794 break;
795 default:
796 gcc_unreachable ();
798 break;
801 /* Fall through. */
803 default:
804 if (GET_CODE (op) == CONST
805 && GET_CODE (XEXP (op, 0)) == UNSPEC)
806 op = XEXP (op, 0);
807 else if (GET_CODE (op) == CONST
808 && GET_CODE (XEXP (op, 0)) == PLUS
809 && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
810 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
812 if (print_hash)
813 fprintf (file, "#");
814 fprintf (file, "(");
815 rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
816 fprintf (file, " + ");
817 output_addr_const (file, XEXP (XEXP (op, 0), 1));
818 fprintf (file, ")");
819 return;
822 switch (GET_CODE (op))
824 case MULT:
825 /* Should be the scaled part of an
826 indexed register indirect address. */
828 rtx base = XEXP (op, 0);
829 rtx index = XEXP (op, 1);
831 /* Check for a swaped index register and scaling factor.
832 Not sure if this can happen, but be prepared to handle it. */
833 if (CONST_INT_P (base) && REG_P (index))
835 rtx tmp = base;
836 base = index;
837 index = tmp;
840 gcc_assert (REG_P (base));
841 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
842 gcc_assert (CONST_INT_P (index));
843 /* Do not try to verify the value of the scalar as it is based
844 on the mode of the MEM not the mode of the MULT. (Which
845 will always be SImode). */
846 fprintf (file, "%s", reg_names [REGNO (base)]);
847 break;
850 case MEM:
851 output_address (XEXP (op, 0));
852 break;
854 case PLUS:
855 output_address (op);
856 break;
858 case REG:
859 gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
860 fprintf (file, "%s", reg_names [REGNO (op)]);
861 break;
863 case SUBREG:
864 gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
865 fprintf (file, "%s", reg_names [subreg_regno (op)]);
866 break;
868 /* This will only be single precision.... */
869 case CONST_DOUBLE:
871 unsigned long val;
872 REAL_VALUE_TYPE rv;
874 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
875 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
876 if (print_hash)
877 fprintf (file, "#");
878 fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
879 break;
882 case CONST_INT:
883 if (print_hash)
884 fprintf (file, "#");
885 rx_print_integer (file, INTVAL (op));
886 break;
888 case UNSPEC:
889 switch (XINT (op, 1))
891 case UNSPEC_PID_ADDR:
893 rtx sym, add;
895 if (print_hash)
896 fprintf (file, "#");
897 sym = XVECEXP (op, 0, 0);
898 add = NULL_RTX;
899 fprintf (file, "(");
900 if (GET_CODE (sym) == PLUS)
902 add = XEXP (sym, 1);
903 sym = XEXP (sym, 0);
905 output_addr_const (file, sym);
906 if (add != NULL_RTX)
908 fprintf (file, "+");
909 output_addr_const (file, add);
911 fprintf (file, "-__pid_base");
912 fprintf (file, ")");
913 return;
916 /* Fall through */
918 case CONST:
919 case SYMBOL_REF:
920 case LABEL_REF:
921 case CODE_LABEL:
922 rx_print_operand_address (file, op);
923 break;
925 default:
926 gcc_unreachable ();
928 break;
932 /* Maybe convert an operand into its PID format. */
935 rx_maybe_pidify_operand (rtx op, int copy_to_reg)
937 if (rx_pid_data_operand (op) == PID_UNENCODED)
939 if (GET_CODE (op) == MEM)
941 rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
942 op = replace_equiv_address (op, a);
944 else
946 op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
949 if (copy_to_reg)
950 op = copy_to_mode_reg (GET_MODE (op), op);
952 return op;
955 /* Returns an assembler template for a move instruction. */
957 char *
958 rx_gen_move_template (rtx * operands, bool is_movu)
960 static char out_template [64];
961 const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
962 const char * src_template;
963 const char * dst_template;
964 rtx dest = operands[0];
965 rtx src = operands[1];
967 /* Decide which extension, if any, should be given to the move instruction. */
968 switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
970 case QImode:
971 /* The .B extension is not valid when
972 loading an immediate into a register. */
973 if (! REG_P (dest) || ! CONST_INT_P (src))
974 extension = ".B";
975 break;
976 case HImode:
977 if (! REG_P (dest) || ! CONST_INT_P (src))
978 /* The .W extension is not valid when
979 loading an immediate into a register. */
980 extension = ".W";
981 break;
982 case DFmode:
983 case DImode:
984 case SFmode:
985 case SImode:
986 extension = ".L";
987 break;
988 case VOIDmode:
989 /* This mode is used by constants. */
990 break;
991 default:
992 debug_rtx (src);
993 gcc_unreachable ();
996 if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
998 gcc_assert (GET_MODE (src) != DImode);
999 gcc_assert (GET_MODE (src) != DFmode);
1001 src_template = "(%A1 - __pid_base)[%P1]";
1003 else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
1005 gcc_assert (GET_MODE (src) != DImode);
1006 gcc_assert (GET_MODE (src) != DFmode);
1008 src_template = "%%gp(%A1)[%G1]";
1010 else
1011 src_template = "%1";
1013 if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
1015 gcc_assert (GET_MODE (dest) != DImode);
1016 gcc_assert (GET_MODE (dest) != DFmode);
1018 dst_template = "%%gp(%A0)[%G0]";
1020 else
1021 dst_template = "%0";
1023 if (GET_MODE (dest) == DImode || GET_MODE (dest) == DFmode)
1025 gcc_assert (! is_movu);
1027 if (REG_P (src) && REG_P (dest) && (REGNO (dest) == REGNO (src) + 1))
1028 sprintf (out_template, "mov.L\t%%H1, %%H0 ! mov.L\t%%1, %%0");
1029 else
1030 sprintf (out_template, "mov.L\t%%1, %%0 ! mov.L\t%%H1, %%H0");
1032 else
1033 sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
1034 extension, src_template, dst_template);
1035 return out_template;
/* Return VALUE rounded up to the next ALIGNMENT boundary.
   ALIGNMENT must be a power of two.  */

static inline unsigned int
rx_round_up (unsigned int value, unsigned int alignment)
{
  unsigned int mask = alignment - 1;

  return (value + mask) & ~mask;
}
1047 /* Return the number of bytes in the argument registers
1048 occupied by an argument of type TYPE and mode MODE. */
1050 static unsigned int
1051 rx_function_arg_size (enum machine_mode mode, const_tree type)
1053 unsigned int num_bytes;
1055 num_bytes = (mode == BLKmode)
1056 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1057 return rx_round_up (num_bytes, UNITS_PER_WORD);
/* Number of argument-passing registers, and the total number of
   bytes they can hold.  */
#define NUM_ARG_REGS		4
#define MAX_NUM_ARG_BYTES	(NUM_ARG_REGS * UNITS_PER_WORD)
1063 /* Return an RTL expression describing the register holding a function
1064 parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
1065 be passed on the stack. CUM describes the previous parameters to the
1066 function and NAMED is false if the parameter is part of a variable
1067 parameter list, or the last named parameter before the start of a
1068 variable parameter list. */
1070 static rtx
1071 rx_function_arg (cumulative_args_t cum, enum machine_mode mode,
1072 const_tree type, bool named)
1074 unsigned int next_reg;
1075 unsigned int bytes_so_far = *get_cumulative_args (cum);
1076 unsigned int size;
1077 unsigned int rounded_size;
1079 /* An exploded version of rx_function_arg_size. */
1080 size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1081 /* If the size is not known it cannot be passed in registers. */
1082 if (size < 1)
1083 return NULL_RTX;
1085 rounded_size = rx_round_up (size, UNITS_PER_WORD);
1087 /* Don't pass this arg via registers if there
1088 are insufficient registers to hold all of it. */
1089 if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
1090 return NULL_RTX;
1092 /* Unnamed arguments and the last named argument in a
1093 variadic function are always passed on the stack. */
1094 if (!named)
1095 return NULL_RTX;
1097 /* Structures must occupy an exact number of registers,
1098 otherwise they are passed on the stack. */
1099 if ((type == NULL || AGGREGATE_TYPE_P (type))
1100 && (size % UNITS_PER_WORD) != 0)
1101 return NULL_RTX;
1103 next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;
1105 return gen_rtx_REG (mode, next_reg);
1108 static void
1109 rx_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
1110 const_tree type, bool named ATTRIBUTE_UNUSED)
1112 *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
1115 static unsigned int
1116 rx_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
1117 const_tree type ATTRIBUTE_UNUSED)
1119 /* Older versions of the RX backend aligned all on-stack arguments
1120 to 32-bits. The RX C ABI however says that they should be
1121 aligned to their natural alignment. (See section 5.2.2 of the ABI). */
1122 if (TARGET_GCC_ABI)
1123 return STACK_BOUNDARY;
1125 if (type)
1127 if (DECL_P (type))
1128 return DECL_ALIGN (type);
1129 return TYPE_ALIGN (type);
1132 return PARM_BOUNDARY;
1135 /* Return an RTL describing where a function return value of type RET_TYPE
1136 is held. */
1138 static rtx
1139 rx_function_value (const_tree ret_type,
1140 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1141 bool outgoing ATTRIBUTE_UNUSED)
1143 enum machine_mode mode = TYPE_MODE (ret_type);
1145 /* RX ABI specifies that small integer types are
1146 promoted to int when returned by a function. */
1147 if (GET_MODE_SIZE (mode) > 0
1148 && GET_MODE_SIZE (mode) < 4
1149 && ! COMPLEX_MODE_P (mode)
1151 return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);
1153 return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
1156 /* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
1157 regard to function returns as does TARGET_FUNCTION_VALUE. */
1159 static enum machine_mode
1160 rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
1161 enum machine_mode mode,
1162 int * punsignedp ATTRIBUTE_UNUSED,
1163 const_tree funtype ATTRIBUTE_UNUSED,
1164 int for_return)
1166 if (for_return != 1
1167 || GET_MODE_SIZE (mode) >= 4
1168 || COMPLEX_MODE_P (mode)
1169 || GET_MODE_SIZE (mode) < 1)
1170 return mode;
1172 return SImode;
1175 static bool
1176 rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1178 HOST_WIDE_INT size;
1180 if (TYPE_MODE (type) != BLKmode
1181 && ! AGGREGATE_TYPE_P (type))
1182 return false;
1184 size = int_size_in_bytes (type);
1185 /* Large structs and those whose size is not an
1186 exact multiple of 4 are returned in memory. */
1187 return size < 1
1188 || size > 16
1189 || (size % UNITS_PER_WORD) != 0;
1192 static rtx
1193 rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
1194 int incoming ATTRIBUTE_UNUSED)
1196 return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
1199 static bool
1200 rx_return_in_msb (const_tree valtype)
1202 return TARGET_BIG_ENDIAN_DATA
1203 && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
1206 /* Returns true if the provided function has the specified attribute. */
1208 static inline bool
1209 has_func_attr (const_tree decl, const char * func_attr)
1211 if (decl == NULL_TREE)
1212 decl = current_function_decl;
1214 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
1217 /* Returns true if the provided function has the "fast_interrupt" attribute. */
1219 static inline bool
1220 is_fast_interrupt_func (const_tree decl)
1222 return has_func_attr (decl, "fast_interrupt");
1225 /* Returns true if the provided function has the "interrupt" attribute. */
1227 static inline bool
1228 is_interrupt_func (const_tree decl)
1230 return has_func_attr (decl, "interrupt");
1233 /* Returns true if the provided function has the "naked" attribute. */
1235 static inline bool
1236 is_naked_func (const_tree decl)
1238 return has_func_attr (decl, "naked");
1241 static bool use_fixed_regs = false;
1243 static void
1244 rx_conditional_register_usage (void)
1246 static bool using_fixed_regs = false;
1248 if (TARGET_PID)
1250 rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
1251 fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
1254 if (rx_small_data_limit > 0)
1256 if (TARGET_PID)
1257 rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
1258 else
1259 rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
1261 fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
1264 if (use_fixed_regs != using_fixed_regs)
1266 static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
1267 static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
1269 if (use_fixed_regs)
1271 unsigned int r;
1273 memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
1274 memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);
1276 /* This is for fast interrupt handlers. Any register in
1277 the range r10 to r13 (inclusive) that is currently
1278 marked as fixed is now a viable, call-used register. */
1279 for (r = 10; r <= 13; r++)
1280 if (fixed_regs[r])
1282 fixed_regs[r] = 0;
1283 call_used_regs[r] = 1;
1286 /* Mark r7 as fixed. This is just a hack to avoid
1287 altering the reg_alloc_order array so that the newly
1288 freed r10-r13 registers are the preferred registers. */
1289 fixed_regs[7] = call_used_regs[7] = 1;
1291 else
1293 /* Restore the normal register masks. */
1294 memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
1295 memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
1298 using_fixed_regs = use_fixed_regs;
1302 struct decl_chain
1304 tree fndecl;
1305 struct decl_chain * next;
1308 /* Stack of decls for which we have issued warnings. */
1309 static struct decl_chain * warned_decls = NULL;
1311 static void
1312 add_warned_decl (tree fndecl)
1314 struct decl_chain * warned = (struct decl_chain *) xmalloc (sizeof * warned);
1316 warned->fndecl = fndecl;
1317 warned->next = warned_decls;
1318 warned_decls = warned;
1321 /* Returns TRUE if FNDECL is on our list of warned about decls. */
1323 static bool
1324 already_warned (tree fndecl)
1326 struct decl_chain * warned;
1328 for (warned = warned_decls;
1329 warned != NULL;
1330 warned = warned->next)
1331 if (warned->fndecl == fndecl)
1332 return true;
1334 return false;
/* Perform any actions necessary before starting to compile FNDECL.
   For the RX we use this to make sure that we have the correct
   set of register masks selected.  If FNDECL is NULL then we are
   compiling top level things.  */

static void
rx_set_current_function (tree fndecl)
{
  /* Remember the last target of rx_set_current_function.
     NOTE: static, so it persists across calls - this is the memo
     that makes the early-out below work.  */
  static tree rx_previous_fndecl;
  bool prev_was_fast_interrupt;
  bool current_is_fast_interrupt;

  /* Only change the context if the function changes.  This hook is called
     several times in the course of compiling a function, and we don't want
     to slow things down too much or call target_reinit when it isn't safe.  */
  if (fndecl == rx_previous_fndecl)
    return;

  prev_was_fast_interrupt
    = rx_previous_fndecl
    ? is_fast_interrupt_func (rx_previous_fndecl) : false;

  current_is_fast_interrupt
    = fndecl ? is_fast_interrupt_func (fndecl) : false;

  /* Fast interrupt handlers use a different fixed/call-used register
     set (see use_fixed_regs), so crossing the boundary between a fast
     interrupt function and a normal one requires re-initializing the
     target's register tables.  */
  if (prev_was_fast_interrupt != current_is_fast_interrupt)
    {
      use_fixed_regs = current_is_fast_interrupt;
      target_reinit ();
    }

  if (current_is_fast_interrupt && rx_warn_multiple_fast_interrupts)
    {
      /* We do not warn about the first fast interrupt routine that
	 we see.  Instead we just push it onto the stack.  */
      if (warned_decls == NULL)
	add_warned_decl (fndecl);

      /* Otherwise if this fast interrupt is one for which we have
	 not already issued a warning, generate one and then push
	 it onto the stack as well.  */
      else if (! already_warned (fndecl))
	{
	  warning (0, "multiple fast interrupt routines seen: %qE and %qE",
		   fndecl, warned_decls->fndecl);
	  add_warned_decl (fndecl);
	}
    }

  rx_previous_fndecl = fndecl;
}
/* Typical stack layout should look like this after the function's prologue:
1393 -- ^
1394 | | \ |
1395 | | arguments saved | Increasing
1396 | | on the stack | addresses
1397 PARENT arg pointer -> | | /
1398 -------------------------- ---- -------------------
1399 CHILD |ret | return address
1401 | | \
1402 | | call saved
1403 | | registers
1404 | | /
1406 | | \
1407 | | local
1408 | | variables
1409 frame pointer -> | | /
1411 | | \
1412 | | outgoing | Decreasing
1413 | | arguments | addresses
1414 current stack pointer -> | | / |
1415 -------------------------- ---- ------------------ V
1416 | | */
/* Return the number of set bits in X (population count).
   Used to size register-save masks; X never has more than
   32 significant bits.  */

static unsigned int
bit_count (unsigned int x)
{
  unsigned int count = 0;

  /* Kernighan's trick: clearing the lowest set bit each time
     round means we loop once per set bit, not once per bit.  */
  while (x != 0)
    {
      x &= x - 1;
      count++;
    }

  return count;
}
/* Nonzero when the current function is an (ordinary or fast) interrupt
   handler and TARGET_SAVE_ACC_REGISTER is enabled, i.e. when the
   prologue/epilogue must preserve the hardware accumulator.  Only
   meaningful while compiling a function (queries the current decl
   via NULL_TREE).  */
#define MUST_SAVE_ACC_REGISTER			\
  (TARGET_SAVE_ACC_REGISTER			\
   && (is_interrupt_func (NULL_TREE)		\
       || is_fast_interrupt_func (NULL_TREE)))
1438 /* Returns either the lowest numbered and highest numbered registers that
1439 occupy the call-saved area of the stack frame, if the registers are
1440 stored as a contiguous block, or else a bitmask of the individual
1441 registers if they are stored piecemeal.
1443 Also computes the size of the frame and the size of the outgoing
1444 arguments block (in bytes). */
1446 static void
1447 rx_get_stack_layout (unsigned int * lowest,
1448 unsigned int * highest,
1449 unsigned int * register_mask,
1450 unsigned int * frame_size,
1451 unsigned int * stack_size)
1453 unsigned int reg;
1454 unsigned int low;
1455 unsigned int high;
1456 unsigned int fixed_reg = 0;
1457 unsigned int save_mask;
1458 unsigned int pushed_mask;
1459 unsigned int unneeded_pushes;
1461 if (is_naked_func (NULL_TREE))
1463 /* Naked functions do not create their own stack frame.
1464 Instead the programmer must do that for us. */
1465 * lowest = 0;
1466 * highest = 0;
1467 * register_mask = 0;
1468 * frame_size = 0;
1469 * stack_size = 0;
1470 return;
1473 for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
1475 if ((df_regs_ever_live_p (reg)
1476 /* Always save all call clobbered registers inside non-leaf
1477 interrupt handlers, even if they are not live - they may
1478 be used in (non-interrupt aware) routines called from this one. */
1479 || (call_used_regs[reg]
1480 && is_interrupt_func (NULL_TREE)
1481 && ! crtl->is_leaf))
1482 && (! call_used_regs[reg]
1483 /* Even call clobbered registered must
1484 be pushed inside interrupt handlers. */
1485 || is_interrupt_func (NULL_TREE)
1486 /* Likewise for fast interrupt handlers, except registers r10 -
1487 r13. These are normally call-saved, but may have been set
1488 to call-used by rx_conditional_register_usage. If so then
1489 they can be used in the fast interrupt handler without
1490 saving them on the stack. */
1491 || (is_fast_interrupt_func (NULL_TREE)
1492 && ! IN_RANGE (reg, 10, 13))))
1494 if (low == 0)
1495 low = reg;
1496 high = reg;
1498 save_mask |= 1 << reg;
1501 /* Remember if we see a fixed register
1502 after having found the low register. */
1503 if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
1504 fixed_reg = reg;
1507 /* If we have to save the accumulator register, make sure
1508 that at least two registers are pushed into the frame. */
1509 if (MUST_SAVE_ACC_REGISTER
1510 && bit_count (save_mask) < 2)
1512 save_mask |= (1 << 13) | (1 << 14);
1513 if (low == 0)
1514 low = 13;
1515 if (high == 0 || low == high)
1516 high = low + 1;
1519 /* Decide if it would be faster fill in the call-saved area of the stack
1520 frame using multiple PUSH instructions instead of a single PUSHM
1521 instruction.
1523 SAVE_MASK is a bitmask of the registers that must be stored in the
1524 call-save area. PUSHED_MASK is a bitmask of the registers that would
1525 be pushed into the area if we used a PUSHM instruction. UNNEEDED_PUSHES
1526 is a bitmask of those registers in pushed_mask that are not in
1527 save_mask.
1529 We use a simple heuristic that says that it is better to use
1530 multiple PUSH instructions if the number of unnecessary pushes is
1531 greater than the number of necessary pushes.
1533 We also use multiple PUSH instructions if there are any fixed registers
1534 between LOW and HIGH. The only way that this can happen is if the user
1535 has specified --fixed-<reg-name> on the command line and in such
1536 circumstances we do not want to touch the fixed registers at all.
1538 FIXME: Is it worth improving this heuristic ? */
1539 pushed_mask = (-1 << low) & ~(-1 << (high + 1));
1540 unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
1542 if ((fixed_reg && fixed_reg <= high)
1543 || (optimize_function_for_speed_p (cfun)
1544 && bit_count (save_mask) < bit_count (unneeded_pushes)))
1546 /* Use multiple pushes. */
1547 * lowest = 0;
1548 * highest = 0;
1549 * register_mask = save_mask;
1551 else
1553 /* Use one push multiple instruction. */
1554 * lowest = low;
1555 * highest = high;
1556 * register_mask = 0;
1559 * frame_size = rx_round_up
1560 (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);
1562 if (crtl->args.size > 0)
1563 * frame_size += rx_round_up
1564 (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);
1566 * stack_size = rx_round_up
1567 (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
/* Generate a PUSHM instruction that matches the given operands.
   OPERANDS[0] is the number of bytes pushed (a CONST_INT) and
   OPERANDS[1] is the PARALLEL built by gen_rx_store_vector.  */

void
rx_emit_stack_pushm (rtx * operands)
{
  HOST_WIDE_INT last_reg;
  rtx first_push;

  /* Convert the byte count into a register count minus one, which is
     the distance from the highest register down to the lowest.  */
  gcc_assert (CONST_INT_P (operands[0]));
  last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;

  /* Element 0 of the PARALLEL is the stack-pointer adjustment;
     element 1 is the store of the highest numbered register.  */
  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_SRC (first_push);
  gcc_assert (REG_P (first_push));

  /* The PUSHM mnemonic takes the register range written low-to-high.  */
  asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
	       reg_names [REGNO (first_push) - last_reg],
	       reg_names [REGNO (first_push)]);
}
/* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate.
   It describes a PUSHM of registers LOW..HIGH: one SET per register plus
   the stack pointer decrement in element 0.  */

static rtx
gen_rx_store_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  /* One slot per register plus one for the SP adjustment.  */
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  /* Element 0: SP -= (number of registers) * UNITS_PER_WORD.  */
  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
		 gen_rtx_MINUS (SImode, stack_pointer_rtx,
				GEN_INT ((count - 1) * UNITS_PER_WORD)));

  /* Elements 1..count-1: store registers HIGH down to LOW at
     successively lower offsets below the incoming stack pointer.  */
  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (VOIDmode,
		   gen_rtx_MEM (SImode,
				gen_rtx_MINUS (SImode, stack_pointer_rtx,
					       GEN_INT ((i + 1) * UNITS_PER_WORD))),
		   gen_rtx_REG (SImode, high - i));
  return vector;
}
1618 /* Mark INSN as being frame related. If it is a PARALLEL
1619 then mark each element as being frame related as well. */
1621 static void
1622 mark_frame_related (rtx insn)
1624 RTX_FRAME_RELATED_P (insn) = 1;
1625 insn = PATTERN (insn);
1627 if (GET_CODE (insn) == PARALLEL)
1629 unsigned int i;
1631 for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
1632 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
1636 static bool
1637 ok_for_max_constant (HOST_WIDE_INT val)
1639 if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
1640 /* If there is no constraint on the size of constants
1641 used as operands, then any value is legitimate. */
1642 return true;
1644 /* rx_max_constant_size specifies the maximum number
1645 of bytes that can be used to hold a signed value. */
1646 return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
1647 ( 1 << (rx_max_constant_size * 8)));
/* Generate an ADD of SRC plus VAL into DEST.
   Handles the case where VAL is too big for max_constant_value.
   Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true.
   A NULL or zero VAL degenerates to a plain move of SRC to DEST.  */

static void
gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
{
  rtx insn;

  if (val == NULL_RTX || INTVAL (val) == 0)
    {
      /* Adding zero: just copy SRC into DEST.  A self-move would be
	 pointless, hence the assert.  */
      gcc_assert (dest != src);

      insn = emit_move_insn (dest, src);
    }
  else if (ok_for_max_constant (INTVAL (val)))
    insn = emit_insn (gen_addsi3 (dest, src, val));
  else
    {
      /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
	 will not reject it.  */
      val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
      insn = emit_insn (gen_addsi3 (dest, src, val));

      if (is_frame_related)
	/* We have to provide our own frame related note here
	   as the dwarf2out code cannot be expected to grok
	   our unspec.  */
	add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		      gen_rtx_SET (SImode, dest,
				   gen_rtx_PLUS (SImode, src, val)));
      return;
    }

  if (is_frame_related)
    RTX_FRAME_RELATED_P (insn) = 1;
  return;
}
/* Expand the prologue of the current function: push the call-saved
   registers (individually or with PUSHM), optionally preserve the
   accumulator for interrupt handlers, set up the frame pointer and
   allocate the local frame and outgoing-argument area.  See the
   stack-layout diagram above for the resulting frame shape.  */

void
rx_expand_prologue (void)
{
  unsigned int stack_size;
  unsigned int frame_size;
  unsigned int mask;
  unsigned int low;
  unsigned int high;
  unsigned int reg;
  rtx insn;

  /* Naked functions use their own, programmer provided prologues.  */
  if (is_naked_func (NULL_TREE))
    return;

  rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);

  if (flag_stack_usage_info)
    current_function_static_stack_size = frame_size + stack_size;

  /* If we use any of the callee-saved registers, save them now.  */
  if (mask)
    {
      /* Push registers in reverse order.  */
      for (reg = CC_REGNUM; reg --;)
	if (mask & (1 << reg))
	  {
	    insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
	    mark_frame_related (insn);
	  }
    }
  else if (low)
    {
      /* Contiguous block: one PUSH for a single register, otherwise
	 a single PUSHM covering LOW..HIGH.  */
      if (high == low)
	insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
      else
	insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
						    * UNITS_PER_WORD),
					   gen_rx_store_vector (low, high)));
      mark_frame_related (insn);
    }

  if (MUST_SAVE_ACC_REGISTER)
    {
      unsigned int acc_high, acc_low;

      /* Interrupt handlers have to preserve the accumulator
	 register if so requested by the user.  Use the first
	 two pushed registers as intermediaries.  */
      if (mask)
	{
	  /* Find the two lowest numbered registers in MASK to use
	     as scratch registers for the accumulator halves.  */
	  acc_low = acc_high = 0;

	  for (reg = 1; reg < CC_REGNUM; reg ++)
	    if (mask & (1 << reg))
	      {
		if (acc_low == 0)
		  acc_low = reg;
		else
		  {
		    acc_high = reg;
		    break;
		  }
	      }

	  /* We have assumed that there are at least two registers pushed... */
	  gcc_assert (acc_high != 0);

	  /* Note - the bottom 16 bits of the accumulator are inaccessible.
	     We just assume that they are zero.  */
	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
	}
      else
	{
	  acc_low = low;
	  acc_high = low + 1;

	  /* We have assumed that there are at least two registers pushed... */
	  gcc_assert (acc_high <= high);

	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
	  emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
				      gen_rx_store_vector (acc_low, acc_high)));
	}
    }

  /* If needed, set up the frame pointer.  */
  if (frame_pointer_needed)
    gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
		  GEN_INT (- (HOST_WIDE_INT) frame_size), true);

  /* Allocate space for the outgoing args.
     If the stack frame has not already been set up then handle this as well.  */
  if (stack_size)
    {
      if (frame_size)
	{
	  if (frame_pointer_needed)
	    gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
			  GEN_INT (- (HOST_WIDE_INT) stack_size), true);
	  else
	    gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
			  GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
			  true);
	}
      else
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (- (HOST_WIDE_INT) stack_size), true);
    }
  else if (frame_size)
    {
      if (! frame_pointer_needed)
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (- (HOST_WIDE_INT) frame_size), true);
      else
	/* Frame pointer already holds SP - frame_size; just copy it.  */
	gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
		      true);
    }
}
/* Emit ".global $tableentry$..." labels for each argument of the
   ANAME ("interrupt" or "vector") attribute on the current function.
   An IDENTIFIER_NODE or STRING_CST argument either selects the table
   name for subsequent entries or, if it is "$default", emits the
   default-entry label; an INTEGER_CST emits a numbered entry in the
   currently selected table.  */

static void
add_vector_labels (FILE *file, const char *aname)
{
  tree vec_attr;
  tree val_attr;
  const char *vname = "vect";   /* Current table name.  */
  const char *s;
  int vnum;

  /* This node is for the vector/interrupt tag itself */
  vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
  if (!vec_attr)
    return;

  /* Now point it at the first argument */
  vec_attr = TREE_VALUE (vec_attr);

  /* Iterate through the arguments.  */
  while (vec_attr)
    {
      val_attr = TREE_VALUE (vec_attr);
      switch (TREE_CODE (val_attr))
	{
	case STRING_CST:
	  s = TREE_STRING_POINTER (val_attr);
	  goto string_id_common;

	case IDENTIFIER_NODE:
	  s = IDENTIFIER_POINTER (val_attr);
	  /* Fall through (via the label) - strings and identifiers
	     are handled identically.  */

	string_id_common:
	  if (strcmp (s, "$default") == 0)
	    {
	      fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
	      fprintf (file, "$tableentry$default$%s:\n", vname);
	    }
	  else
	    vname = s;
	  break;

	case INTEGER_CST:
	  vnum = TREE_INT_CST_LOW (val_attr);

	  fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
	  fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
	  break;

	default:
	  /* Other tree codes are silently ignored.  */
	  ;
	}
      vec_attr = TREE_CHAIN (vec_attr);
    }
}
/* Output assembler comments describing notable properties of the
   current function, plus any interrupt/vector table entry labels,
   just before the function's prologue is emitted.  */

static void
rx_output_function_prologue (FILE * file,
			     HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
{
  add_vector_labels (file, "interrupt");
  add_vector_labels (file, "vector");

  if (is_fast_interrupt_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");

  if (is_interrupt_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Interrupt Handler\n");

  if (is_naked_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Naked Function\n");

  if (cfun->static_chain_decl != NULL)
    asm_fprintf (file, "\t; Note: Nested function declared "
		 "inside another function.\n");

  if (crtl->calls_eh_return)
    asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
}
/* Generate a POPM or RTSD instruction that matches the given operands.
   OPERANDS[0] is the stack adjustment (a CONST_INT); OPERANDS[1] is the
   PARALLEL built by gen_rx_popm_vector / gen_rx_rtsd_vector.  IS_POPM
   selects which mnemonic to emit.  */

void
rx_emit_stack_popm (rtx * operands, bool is_popm)
{
  HOST_WIDE_INT stack_adjust;
  HOST_WIDE_INT last_reg;
  rtx first_push;

  gcc_assert (CONST_INT_P (operands[0]));
  stack_adjust = INTVAL (operands[0]);

  /* The RTSD vector carries one extra element (the return), hence the
     different bias when computing the register-range length.  */
  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);

  /* Element 1 of the PARALLEL is the load of the lowest register.  */
  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_DEST (first_push);
  gcc_assert (REG_P (first_push));

  if (is_popm)
    asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
		 reg_names [REGNO (first_push)],
		 reg_names [REGNO (first_push) + last_reg]);
  else
    asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
		 (int) stack_adjust,
		 reg_names [REGNO (first_push)],
		 reg_names [REGNO (first_push) + last_reg]);
}
/* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate.
   It describes an RTSD instruction: pop registers LOW..HIGH, add
   ADJUST to the stack pointer, and return.  */

static rtx
gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
{
  unsigned int i;
  /* Extra slots for the SP adjustment (element 0) and the return
     (last element), in addition to one load per register.  */
  unsigned int bias = 3;
  unsigned int count = (high - low) + bias;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  /* Element 0: SP += ADJUST.  */
  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
		 plus_constant (Pmode, stack_pointer_rtx, adjust));

  /* Elements 1..count-2: load registers LOW..HIGH from increasing
     offsets above the current stack pointer.  */
  for (i = 0; i < count - 2; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (VOIDmode,
		   gen_rtx_REG (SImode, low + i),
		   gen_rtx_MEM (SImode,
				i == 0 ? stack_pointer_rtx
				: plus_constant (Pmode, stack_pointer_rtx,
						 i * UNITS_PER_WORD)));

  /* Final element: the return itself.  */
  XVECEXP (vector, 0, count - 1) = ret_rtx;

  return vector;
}
/* Generate a PARALLEL which will satisfy the rx_load_multiple_vector
   predicate.  It describes a POPM of registers LOW..HIGH: one load per
   register plus the stack pointer increment in element 0.  */

static rtx
gen_rx_popm_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  /* One slot per register plus one for the SP adjustment.  */
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  /* Element 0: SP += (number of registers) * UNITS_PER_WORD.  */
  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
		 plus_constant (Pmode, stack_pointer_rtx,
				(count - 1) * UNITS_PER_WORD));

  /* Elements 1..count-1: load registers LOW..HIGH from increasing
     offsets above the current stack pointer.  */
  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (VOIDmode,
		   gen_rtx_REG (SImode, low + i),
		   gen_rtx_MEM (SImode,
				i == 0 ? stack_pointer_rtx
				: plus_constant (Pmode, stack_pointer_rtx,
						 i * UNITS_PER_WORD)));

  return vector;
}
1982 /* Returns true if a simple return insn can be used. */
1984 bool
1985 rx_can_use_simple_return (void)
1987 unsigned int low;
1988 unsigned int high;
1989 unsigned int frame_size;
1990 unsigned int stack_size;
1991 unsigned int register_mask;
1993 if (is_naked_func (NULL_TREE)
1994 || is_fast_interrupt_func (NULL_TREE)
1995 || is_interrupt_func (NULL_TREE))
1996 return false;
1998 rx_get_stack_layout (& low, & high, & register_mask,
1999 & frame_size, & stack_size);
2001 return (register_mask == 0
2002 && (frame_size + stack_size) == 0
2003 && low == 0);
/* Expand the epilogue of the current function: restore the accumulator
   for interrupt handlers if necessary, pop the saved registers,
   deallocate the frame and emit the appropriate return (or nothing but
   the frame teardown for a sibcall).  Mirrors rx_expand_prologue.  */

void
rx_expand_epilogue (bool is_sibcall)
{
  unsigned int low;
  unsigned int high;
  unsigned int frame_size;
  unsigned int stack_size;
  unsigned int register_mask;
  unsigned int regs_size;
  unsigned int reg;
  unsigned HOST_WIDE_INT total_size;

  /* FIXME: We do not support indirect sibcalls at the moment becaause we
     cannot guarantee that the register holding the function address is a
     call-used register.  If it is a call-saved register then the stack
     pop instructions generated in the epilogue will corrupt the address
     before it is used.

     Creating a new call-used-only register class works but then the
     reload pass gets stuck because it cannot always find a call-used
     register for spilling sibcalls.

     The other possible solution is for this pass to scan forward for the
     sibcall instruction (if it has been generated) and work out if it
     is an indirect sibcall using a call-saved register.  If it is then
     the address can copied into a call-used register in this epilogue
     code and the sibcall instruction modified to use that register.  */

  if (is_naked_func (NULL_TREE))
    {
      gcc_assert (! is_sibcall);

      /* Naked functions use their own, programmer provided epilogues.
	 But, in order to keep gcc happy we have to generate some kind of
	 epilogue RTL.  */
      emit_jump_insn (gen_naked_return ());
      return;
    }

  rx_get_stack_layout (& low, & high, & register_mask,
		       & frame_size, & stack_size);

  total_size = frame_size + stack_size;
  regs_size = ((high - low) + 1) * UNITS_PER_WORD;

  /* See if we are unable to use the special stack frame deconstruct and
     return instructions.  In most cases we can use them, but the exceptions
     are:

     - Sibling calling functions deconstruct the frame but do not return to
       their caller.  Instead they branch to their sibling and allow their
       return instruction to return to this function's parent.

     - Fast and normal interrupt handling functions have to use special
       return instructions.

     - Functions where we have pushed a fragmented set of registers into the
       call-save area must have the same set of registers popped.  */
  if (is_sibcall
      || is_fast_interrupt_func (NULL_TREE)
      || is_interrupt_func (NULL_TREE)
      || register_mask)
    {
      /* Cannot use the special instructions - deconstruct by hand.  */
      if (total_size)
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (total_size), false);

      if (MUST_SAVE_ACC_REGISTER)
	{
	  unsigned int acc_low, acc_high;

	  /* Reverse the saving of the accumulator register onto the stack.
	     Note we must adjust the saved "low" accumulator value as it
	     is really the middle 32-bits of the accumulator.  */
	  if (register_mask)
	    {
	      /* Recompute the same two scratch registers the prologue
		 chose (the two lowest numbered ones in the mask).  */
	      acc_low = acc_high = 0;

	      for (reg = 1; reg < CC_REGNUM; reg ++)
		if (register_mask & (1 << reg))
		  {
		    if (acc_low == 0)
		      acc_low = reg;
		    else
		      {
			acc_high = reg;
			break;
		      }
		  }
	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
	    }
	  else
	    {
	      acc_low = low;
	      acc_high = low + 1;
	      emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
					 gen_rx_popm_vector (acc_low, acc_high)));
	    }

	  /* Shift the middle 32 bits back up before writing them into
	     the low half of the accumulator.  */
	  emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
				  gen_rtx_REG (SImode, acc_low),
				  GEN_INT (16)));
	  emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
	}

      if (register_mask)
	{
	  for (reg = 0; reg < CC_REGNUM; reg ++)
	    if (register_mask & (1 << reg))
	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)));
	}
      else if (low)
	{
	  if (high == low)
	    emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
	  else
	    emit_insn (gen_stack_popm (GEN_INT (regs_size),
				       gen_rx_popm_vector (low, high)));
	}

      if (is_fast_interrupt_func (NULL_TREE))
	{
	  gcc_assert (! is_sibcall);
	  emit_jump_insn (gen_fast_interrupt_return ());
	}
      else if (is_interrupt_func (NULL_TREE))
	{
	  gcc_assert (! is_sibcall);
	  emit_jump_insn (gen_exception_return ());
	}
      else if (! is_sibcall)
	emit_jump_insn (gen_simple_return ());

      return;
    }

  /* If we allocated space on the stack, free it now.  */
  if (total_size)
    {
      unsigned HOST_WIDE_INT rtsd_size;

      /* See if we can use the RTSD instruction.  */
      rtsd_size = total_size + regs_size;
      if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
	{
	  if (low)
	    emit_jump_insn (gen_pop_and_return
			    (GEN_INT (rtsd_size),
			     gen_rx_rtsd_vector (rtsd_size, low, high)));
	  else
	    emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));

	  return;
	}

      gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		    GEN_INT (total_size), false);
    }

  if (low)
    emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
					gen_rx_rtsd_vector (regs_size,
							    low, high)));
  else
    emit_jump_insn (gen_simple_return ());
}
/* Compute the offset between FROM (arg pointer or frame pointer) and
   TO (frame pointer or stack pointer).  NOTE(review): the historical
   comment said "in words" but every term below is scaled by
   UNITS_PER_WORD, so the result is in bytes — confirm against the
   ELIMINABLE_REGS users.  See ASCII art comment at the start of
   rx_expand_prologue for more information.  */

int
rx_initial_elimination_offset (int from, int to)
{
  unsigned int low;
  unsigned int high;
  unsigned int frame_size;
  unsigned int stack_size;
  unsigned int mask;

  rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);

  if (from == ARG_POINTER_REGNUM)
    {
      /* Extend the computed size of the stack frame to
	 include the registers pushed in the prologue.  */
      if (low)
	frame_size += ((high - low) + 1) * UNITS_PER_WORD;
      else
	frame_size += bit_count (mask) * UNITS_PER_WORD;

      /* Remember to include the return address.  */
      frame_size += 1 * UNITS_PER_WORD;

      if (to == FRAME_POINTER_REGNUM)
	return frame_size;

      gcc_assert (to == STACK_POINTER_REGNUM);
      return frame_size + stack_size;
    }

  gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
  return stack_size;
}
2216 /* Decide if a variable should go into one of the small data sections. */
2218 static bool
2219 rx_in_small_data (const_tree decl)
2221 int size;
2222 const char * section;
2224 if (rx_small_data_limit == 0)
2225 return false;
2227 if (TREE_CODE (decl) != VAR_DECL)
2228 return false;
2230 /* We do not put read-only variables into a small data area because
2231 they would be placed with the other read-only sections, far away
2232 from the read-write data sections, and we only have one small
2233 data area pointer.
2234 Similarly commons are placed in the .bss section which might be
2235 far away (and out of alignment with respect to) the .data section. */
2236 if (TREE_READONLY (decl) || DECL_COMMON (decl))
2237 return false;
2239 section = DECL_SECTION_NAME (decl);
2240 if (section)
2241 return (strcmp (section, "D_2") == 0) || (strcmp (section, "B_2") == 0);
2243 size = int_size_in_bytes (TREE_TYPE (decl));
2245 return (size > 0) && (size <= rx_small_data_limit);
2248 /* Return a section for X.
2249 The only special thing we do here is to honor small data. */
2251 static section *
2252 rx_select_rtx_section (enum machine_mode mode,
2253 rtx x,
2254 unsigned HOST_WIDE_INT align)
2256 if (rx_small_data_limit > 0
2257 && GET_MODE_SIZE (mode) <= rx_small_data_limit
2258 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
2259 return sdata_section;
2261 return default_elf_select_rtx_section (mode, x, align);
/* TARGET_ASM_SELECT_SECTION hook: choose an output section for DECL,
   honoring small data and the restrictions of the Renesas assembler.  */

static section *
rx_select_section (tree decl,
		   int reloc,
		   unsigned HOST_WIDE_INT align)
{
  if (rx_small_data_limit > 0)
    {
      switch (categorize_decl_for_section (decl, reloc))
	{
	case SECCAT_SDATA:  return sdata_section;
	case SECCAT_SBSS:   return sbss_section;
	case SECCAT_SRODATA:
	  /* Fall through.  We do not put small, read only
	     data into the C_2 section because we are not
	     using the C_2 section.  We do not use the C_2
	     section because it is located with the other
	     read-only data sections, far away from the read-write
	     data sections and we only have one small data
	     pointer (r13).  */
	default:
	  break;
	}
    }

  /* If we are supporting the Renesas assembler
     we cannot use mergeable sections.  */
  if (TARGET_AS100_SYNTAX)
    switch (categorize_decl_for_section (decl, reloc))
      {
      case SECCAT_RODATA_MERGE_CONST:
      case SECCAT_RODATA_MERGE_STR_INIT:
      case SECCAT_RODATA_MERGE_STR:
	return readonly_data_section;

      default:
	break;
      }

  return default_elf_select_section (decl, reloc, align);
}
/* Codes for the RX machine-specific builtins.  The order here must
   match the registration order in rx_init_builtins, since the codes
   index the rx_builtins array below.  */
enum rx_builtin
{
  RX_BUILTIN_BRK,
  RX_BUILTIN_CLRPSW,
  RX_BUILTIN_INT,
  RX_BUILTIN_MACHI,
  RX_BUILTIN_MACLO,
  RX_BUILTIN_MULHI,
  RX_BUILTIN_MULLO,
  RX_BUILTIN_MVFACHI,
  RX_BUILTIN_MVFACMI,
  RX_BUILTIN_MVFC,
  RX_BUILTIN_MVTACHI,
  RX_BUILTIN_MVTACLO,
  RX_BUILTIN_MVTC,
  RX_BUILTIN_MVTIPL,
  RX_BUILTIN_RACW,
  RX_BUILTIN_REVW,
  RX_BUILTIN_RMPA,
  RX_BUILTIN_ROUND,
  RX_BUILTIN_SETPSW,
  RX_BUILTIN_WAIT,
  RX_BUILTIN_max        /* Sentinel: number of builtins.  */
};

/* Table of builtin function decls, indexed by enum rx_builtin.
   GTY-marked so the garbage collector keeps the decls alive.  */
static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
/* Register all of the RX machine-specific builtins with the middle end.
   The ADD_RX_BUILTINn helper macros take the enum name (without the
   RX_BUILTIN_ prefix), the assembler-level name suffix and the return
   and argument type-node name fragments ("void" -> void_type_node,
   "intSI" -> intSI_type_node, etc).  */

static void
rx_init_builtins (void)
{
#define ADD_RX_BUILTIN0(UC_NAME, LC_NAME, RET_TYPE)		\
   rx_builtins[RX_BUILTIN_##UC_NAME] =					\
   add_builtin_function ("__builtin_rx_" LC_NAME,			\
			 build_function_type_list (RET_TYPE##_type_node, \
						   NULL_TREE),		\
			 RX_BUILTIN_##UC_NAME,				\
			 BUILT_IN_MD, NULL, NULL_TREE)

#define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE)		\
   rx_builtins[RX_BUILTIN_##UC_NAME] =					\
   add_builtin_function ("__builtin_rx_" LC_NAME,			\
			 build_function_type_list (RET_TYPE##_type_node, \
						   ARG_TYPE##_type_node, \
						   NULL_TREE),		\
			 RX_BUILTIN_##UC_NAME,				\
			 BUILT_IN_MD, NULL, NULL_TREE)

#define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
  rx_builtins[RX_BUILTIN_##UC_NAME] =					\
  add_builtin_function ("__builtin_rx_" LC_NAME,			\
			build_function_type_list (RET_TYPE##_type_node, \
						  ARG_TYPE1##_type_node,\
						  ARG_TYPE2##_type_node,\
						  NULL_TREE),		\
			RX_BUILTIN_##UC_NAME,				\
			BUILT_IN_MD, NULL, NULL_TREE)

#define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
  rx_builtins[RX_BUILTIN_##UC_NAME] =					\
  add_builtin_function ("__builtin_rx_" LC_NAME,			\
			build_function_type_list (RET_TYPE##_type_node, \
						  ARG_TYPE1##_type_node,\
						  ARG_TYPE2##_type_node,\
						  ARG_TYPE3##_type_node,\
						  NULL_TREE),		\
			RX_BUILTIN_##UC_NAME,				\
			BUILT_IN_MD, NULL, NULL_TREE)

  /* One registration per enum rx_builtin value.  */
  ADD_RX_BUILTIN0 (BRK,     "brk",     void);
  ADD_RX_BUILTIN1 (CLRPSW,  "clrpsw",  void,  integer);
  ADD_RX_BUILTIN1 (SETPSW,  "setpsw",  void,  integer);
  ADD_RX_BUILTIN1 (INT,     "int",     void,  integer);
  ADD_RX_BUILTIN2 (MACHI,   "machi",   void,  intSI, intSI);
  ADD_RX_BUILTIN2 (MACLO,   "maclo",   void,  intSI, intSI);
  ADD_RX_BUILTIN2 (MULHI,   "mulhi",   void,  intSI, intSI);
  ADD_RX_BUILTIN2 (MULLO,   "mullo",   void,  intSI, intSI);
  ADD_RX_BUILTIN0 (MVFACHI, "mvfachi", intSI);
  ADD_RX_BUILTIN0 (MVFACMI, "mvfacmi", intSI);
  ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void,  intSI);
  ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void,  intSI);
  ADD_RX_BUILTIN0 (RMPA,    "rmpa",    void);
  ADD_RX_BUILTIN1 (MVFC,    "mvfc",    intSI, integer);
  ADD_RX_BUILTIN2 (MVTC,    "mvtc",    void,  integer, integer);
  ADD_RX_BUILTIN1 (MVTIPL,  "mvtipl",  void,  integer);
  ADD_RX_BUILTIN1 (RACW,    "racw",    void,  integer);
  ADD_RX_BUILTIN1 (ROUND,   "round",   intSI, float);
  ADD_RX_BUILTIN1 (REVW,    "revw",    intSI, intSI);
  ADD_RX_BUILTIN0 (WAIT,    "wait",    void);
}
2395 /* Return the RX builtin for CODE. */
2397 static tree
2398 rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2400 if (code >= RX_BUILTIN_max)
2401 return error_mark_node;
2403 return rx_builtins[code];
2406 static rtx
2407 rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
2409 if (reg && ! REG_P (arg))
2410 arg = force_reg (SImode, arg);
2412 emit_insn (gen_func (arg));
2414 return NULL_RTX;
2417 static rtx
2418 rx_expand_builtin_mvtc (tree exp)
2420 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2421 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2423 if (! CONST_INT_P (arg1))
2424 return NULL_RTX;
2426 if (! REG_P (arg2))
2427 arg2 = force_reg (SImode, arg2);
2429 emit_insn (gen_mvtc (arg1, arg2));
2431 return NULL_RTX;
2434 static rtx
2435 rx_expand_builtin_mvfc (tree t_arg, rtx target)
2437 rtx arg = expand_normal (t_arg);
2439 if (! CONST_INT_P (arg))
2440 return NULL_RTX;
2442 if (target == NULL_RTX)
2443 return NULL_RTX;
2445 if (! REG_P (target))
2446 target = force_reg (SImode, target);
2448 emit_insn (gen_mvfc (target, arg));
2450 return target;
2453 static rtx
2454 rx_expand_builtin_mvtipl (rtx arg)
2456 /* The RX610 does not support the MVTIPL instruction. */
2457 if (rx_cpu_type == RX610)
2458 return NULL_RTX;
2460 if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
2461 return NULL_RTX;
2463 emit_insn (gen_mvtipl (arg));
2465 return NULL_RTX;
2468 static rtx
2469 rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2471 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2472 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2474 if (! REG_P (arg1))
2475 arg1 = force_reg (SImode, arg1);
2477 if (! REG_P (arg2))
2478 arg2 = force_reg (SImode, arg2);
2480 emit_insn (gen_func (arg1, arg2));
2482 return NULL_RTX;
2485 static rtx
2486 rx_expand_int_builtin_1_arg (rtx arg,
2487 rtx target,
2488 rtx (* gen_func)(rtx, rtx),
2489 bool mem_ok)
2491 if (! REG_P (arg))
2492 if (!mem_ok || ! MEM_P (arg))
2493 arg = force_reg (SImode, arg);
2495 if (target == NULL_RTX || ! REG_P (target))
2496 target = gen_reg_rtx (SImode);
2498 emit_insn (gen_func (target, arg));
2500 return target;
2503 static rtx
2504 rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2506 if (target == NULL_RTX || ! REG_P (target))
2507 target = gen_reg_rtx (SImode);
2509 emit_insn (gen_func (target));
2511 return target;
2514 static rtx
2515 rx_expand_builtin_round (rtx arg, rtx target)
2517 if ((! REG_P (arg) && ! MEM_P (arg))
2518 || GET_MODE (arg) != SFmode)
2519 arg = force_reg (SFmode, arg);
2521 if (target == NULL_RTX || ! REG_P (target))
2522 target = gen_reg_rtx (SImode);
2524 emit_insn (gen_lrintsf2 (target, arg));
2526 return target;
2529 static int
2530 valid_psw_flag (rtx op, const char *which)
2532 static int mvtc_inform_done = 0;
2534 if (GET_CODE (op) == CONST_INT)
2535 switch (INTVAL (op))
2537 case 0: case 'c': case 'C':
2538 case 1: case 'z': case 'Z':
2539 case 2: case 's': case 'S':
2540 case 3: case 'o': case 'O':
2541 case 8: case 'i': case 'I':
2542 case 9: case 'u': case 'U':
2543 return 1;
2546 error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
2547 if (!mvtc_inform_done)
2548 error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
2549 mvtc_inform_done = 1;
2551 return 0;
2554 static rtx
2555 rx_expand_builtin (tree exp,
2556 rtx target,
2557 rtx subtarget ATTRIBUTE_UNUSED,
2558 enum machine_mode mode ATTRIBUTE_UNUSED,
2559 int ignore ATTRIBUTE_UNUSED)
2561 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
2562 tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
2563 rtx op = arg ? expand_normal (arg) : NULL_RTX;
2564 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2566 switch (fcode)
2568 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
2569 case RX_BUILTIN_CLRPSW:
2570 if (!valid_psw_flag (op, "clrpsw"))
2571 return NULL_RTX;
2572 return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
2573 case RX_BUILTIN_SETPSW:
2574 if (!valid_psw_flag (op, "setpsw"))
2575 return NULL_RTX;
2576 return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
2577 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
2578 (op, gen_int, false);
2579 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
2580 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
2581 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
2582 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
2583 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2584 (target, gen_mvfachi);
2585 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2586 (target, gen_mvfacmi);
2587 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2588 (op, gen_mvtachi, true);
2589 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2590 (op, gen_mvtaclo, true);
2591 case RX_BUILTIN_RMPA: emit_insn (gen_rmpa ()); return NULL_RTX;
2592 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2593 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
2594 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
2595 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2596 (op, gen_racw, false);
2597 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2598 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2599 (op, target, gen_revw, false);
2600 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
2602 default:
2603 internal_error ("bad builtin code");
2604 break;
2607 return NULL_RTX;
2610 /* Place an element into a constructor or destructor section.
2611 Like default_ctor_section_asm_out_constructor in varasm.c
2612 except that it uses .init_array (or .fini_array) and it
2613 handles constructor priorities. */
2615 static void
2616 rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2618 section * s;
2620 if (priority != DEFAULT_INIT_PRIORITY)
2622 char buf[18];
2624 sprintf (buf, "%s.%.5u",
2625 is_ctor ? ".init_array" : ".fini_array",
2626 priority);
2627 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2629 else if (is_ctor)
2630 s = ctors_section;
2631 else
2632 s = dtors_section;
2634 switch_to_section (s);
2635 assemble_align (POINTER_SIZE);
2636 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2639 static void
2640 rx_elf_asm_constructor (rtx symbol, int priority)
2642 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2645 static void
2646 rx_elf_asm_destructor (rtx symbol, int priority)
2648 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2651 /* Check "fast_interrupt", "interrupt" and "naked" attributes. */
2653 static tree
2654 rx_handle_func_attribute (tree * node,
2655 tree name,
2656 tree args,
2657 int flags ATTRIBUTE_UNUSED,
2658 bool * no_add_attrs)
2660 gcc_assert (DECL_P (* node));
2662 if (TREE_CODE (* node) != FUNCTION_DECL)
2664 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2665 name);
2666 * no_add_attrs = true;
2669 /* FIXME: We ought to check for conflicting attributes. */
2671 /* FIXME: We ought to check that the interrupt and exception
2672 handler attributes have been applied to void functions. */
2673 return NULL_TREE;
2676 /* Check "vector" attribute. */
2678 static tree
2679 rx_handle_vector_attribute (tree * node,
2680 tree name,
2681 tree args,
2682 int flags ATTRIBUTE_UNUSED,
2683 bool * no_add_attrs)
2685 gcc_assert (DECL_P (* node));
2686 gcc_assert (args != NULL_TREE);
2688 if (TREE_CODE (* node) != FUNCTION_DECL)
2690 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2691 name);
2692 * no_add_attrs = true;
2695 return NULL_TREE;
2698 /* Table of RX specific attributes. */
2699 const struct attribute_spec rx_attribute_table[] =
2701 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2702 affects_type_identity. */
2703 { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
2704 false },
2705 { "interrupt", 0, -1, true, false, false, rx_handle_func_attribute,
2706 false },
2707 { "naked", 0, 0, true, false, false, rx_handle_func_attribute,
2708 false },
2709 { "vector", 1, -1, true, false, false, rx_handle_vector_attribute,
2710 false },
2711 { NULL, 0, 0, false, false, false, NULL, false }
2714 /* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE. */
2716 static void
2717 rx_override_options_after_change (void)
2719 static bool first_time = TRUE;
2721 if (first_time)
2723 /* If this is the first time through and the user has not disabled
2724 the use of RX FPU hardware then enable -ffinite-math-only,
2725 since the FPU instructions do not support NaNs and infinities. */
2726 if (TARGET_USE_FPU)
2727 flag_finite_math_only = 1;
2729 first_time = FALSE;
2731 else
2733 /* Alert the user if they are changing the optimization options
2734 to use IEEE compliant floating point arithmetic with RX FPU insns. */
2735 if (TARGET_USE_FPU
2736 && !flag_finite_math_only)
2737 warning (0, "RX FPU instructions do not support NaNs and infinities");
2741 static void
2742 rx_option_override (void)
2744 unsigned int i;
2745 cl_deferred_option *opt;
2746 vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;
2748 if (v)
2749 FOR_EACH_VEC_ELT (*v, i, opt)
2751 switch (opt->opt_index)
2753 case OPT_mint_register_:
2754 switch (opt->value)
2756 case 4:
2757 fixed_regs[10] = call_used_regs [10] = 1;
2758 /* Fall through. */
2759 case 3:
2760 fixed_regs[11] = call_used_regs [11] = 1;
2761 /* Fall through. */
2762 case 2:
2763 fixed_regs[12] = call_used_regs [12] = 1;
2764 /* Fall through. */
2765 case 1:
2766 fixed_regs[13] = call_used_regs [13] = 1;
2767 /* Fall through. */
2768 case 0:
2769 rx_num_interrupt_regs = opt->value;
2770 break;
2771 default:
2772 rx_num_interrupt_regs = 0;
2773 /* Error message already given because rx_handle_option
2774 returned false. */
2775 break;
2777 break;
2779 default:
2780 gcc_unreachable ();
2784 /* This target defaults to strict volatile bitfields. */
2785 if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
2786 flag_strict_volatile_bitfields = 1;
2788 rx_override_options_after_change ();
2790 if (align_jumps == 0 && ! optimize_size)
2791 align_jumps = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 2 : 3);
2792 if (align_loops == 0 && ! optimize_size)
2793 align_loops = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 2 : 3);
2794 if (align_labels == 0 && ! optimize_size)
2795 align_labels = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 2 : 3);
2799 static bool
2800 rx_allocate_stack_slots_for_args (void)
2802 /* Naked functions should not allocate stack slots for arguments. */
2803 return ! is_naked_func (NULL_TREE);
2806 static bool
2807 rx_func_attr_inlinable (const_tree decl)
2809 return ! is_fast_interrupt_func (decl)
2810 && ! is_interrupt_func (decl)
2811 && ! is_naked_func (decl);
2814 static bool
2815 rx_warn_func_return (tree decl)
2817 /* Naked functions are implemented entirely in assembly, including the
2818 return sequence, so suppress warnings about this. */
2819 return !is_naked_func (decl);
2822 /* Return nonzero if it is ok to make a tail-call to DECL,
2823 a function_decl or NULL if this is an indirect call, using EXP */
2825 static bool
2826 rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2828 /* Do not allow indirect tailcalls. The
2829 sibcall patterns do not support them. */
2830 if (decl == NULL)
2831 return false;
2833 /* Never tailcall from inside interrupt handlers or naked functions. */
2834 if (is_fast_interrupt_func (NULL_TREE)
2835 || is_interrupt_func (NULL_TREE)
2836 || is_naked_func (NULL_TREE))
2837 return false;
2839 return true;
2842 static void
2843 rx_file_start (void)
2845 if (! TARGET_AS100_SYNTAX)
2846 default_file_start ();
2849 static bool
2850 rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2852 /* The packed attribute overrides the MS behaviour. */
2853 return ! TYPE_PACKED (record_type);
2856 /* Returns true if X a legitimate constant for an immediate
2857 operand on the RX. X is already known to satisfy CONSTANT_P. */
2859 bool
2860 rx_is_legitimate_constant (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2862 switch (GET_CODE (x))
2864 case CONST:
2865 x = XEXP (x, 0);
2867 if (GET_CODE (x) == PLUS)
2869 if (! CONST_INT_P (XEXP (x, 1)))
2870 return false;
2872 /* GCC would not pass us CONST_INT + CONST_INT so we
2873 know that we have {SYMBOL|LABEL} + CONST_INT. */
2874 x = XEXP (x, 0);
2875 gcc_assert (! CONST_INT_P (x));
2878 switch (GET_CODE (x))
2880 case LABEL_REF:
2881 case SYMBOL_REF:
2882 return true;
2884 case UNSPEC:
2885 return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;
2887 default:
2888 /* FIXME: Can this ever happen ? */
2889 gcc_unreachable ();
2891 break;
2893 case LABEL_REF:
2894 case SYMBOL_REF:
2895 return true;
2896 case CONST_DOUBLE:
2897 return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
2898 case CONST_VECTOR:
2899 return false;
2900 default:
2901 gcc_assert (CONST_INT_P (x));
2902 break;
2905 return ok_for_max_constant (INTVAL (x));
2908 static int
2909 rx_address_cost (rtx addr, enum machine_mode mode ATTRIBUTE_UNUSED,
2910 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
2912 rtx a, b;
2914 if (GET_CODE (addr) != PLUS)
2915 return COSTS_N_INSNS (1);
2917 a = XEXP (addr, 0);
2918 b = XEXP (addr, 1);
2920 if (REG_P (a) && REG_P (b))
2921 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2922 return COSTS_N_INSNS (4);
2924 if (speed)
2925 /* [REG+OFF] is just as fast as [REG]. */
2926 return COSTS_N_INSNS (1);
2928 if (CONST_INT_P (b)
2929 && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2930 /* Try to discourage REG + <large OFF> when optimizing for size. */
2931 return COSTS_N_INSNS (2);
2933 return COSTS_N_INSNS (1);
2936 static bool
2937 rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2939 /* We can always eliminate to the frame pointer.
2940 We can eliminate to the stack pointer unless a frame
2941 pointer is needed. */
2943 return to == FRAME_POINTER_REGNUM
2944 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
2948 static void
2949 rx_trampoline_template (FILE * file)
2951 /* Output assembler code for a block containing the constant
2952 part of a trampoline, leaving space for the variable parts.
2954 On the RX, (where r8 is the static chain regnum) the trampoline
2955 looks like:
2957 mov #<static chain value>, r8
2958 mov #<function's address>, r9
2959 jmp r9
2961 In big-endian-data-mode however instructions are read into the CPU
2962 4 bytes at a time. These bytes are then swapped around before being
2963 passed to the decoder. So...we must partition our trampoline into
2964 4 byte packets and swap these packets around so that the instruction
2965 reader will reverse the process. But, in order to avoid splitting
2966 the 32-bit constants across these packet boundaries, (making inserting
2967 them into the constructed trampoline very difficult) we have to pad the
2968 instruction sequence with NOP insns. ie:
2972 mov.l #<...>, r8
2975 mov.l #<...>, r9
2976 jmp r9
2978 nop */
2980 if (! TARGET_BIG_ENDIAN_DATA)
2982 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
2983 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
2984 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
2986 else
2988 char r8 = '0' + STATIC_CHAIN_REGNUM;
2989 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
2991 if (TARGET_AS100_SYNTAX)
2993 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
2994 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2995 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
2996 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2997 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
2999 else
3001 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
3002 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
3003 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
3004 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
3005 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
3010 static void
3011 rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
3013 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
3015 emit_block_move (tramp, assemble_trampoline_template (),
3016 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3018 if (TARGET_BIG_ENDIAN_DATA)
3020 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
3021 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
3023 else
3025 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
3026 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
3030 static int
3031 rx_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
3032 reg_class_t regclass ATTRIBUTE_UNUSED,
3033 bool in)
3035 return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
3038 /* Convert a CC_MODE to the set of flags that it represents. */
3040 static unsigned int
3041 flags_from_mode (enum machine_mode mode)
3043 switch (mode)
3045 case CC_ZSmode:
3046 return CC_FLAG_S | CC_FLAG_Z;
3047 case CC_ZSOmode:
3048 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
3049 case CC_ZSCmode:
3050 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
3051 case CCmode:
3052 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
3053 case CC_Fmode:
3054 return CC_FLAG_FP;
3055 default:
3056 gcc_unreachable ();
3060 /* Convert a set of flags to a CC_MODE that can implement it. */
3062 static enum machine_mode
3063 mode_from_flags (unsigned int f)
3065 if (f & CC_FLAG_FP)
3066 return CC_Fmode;
3067 if (f & CC_FLAG_O)
3069 if (f & CC_FLAG_C)
3070 return CCmode;
3071 else
3072 return CC_ZSOmode;
3074 else if (f & CC_FLAG_C)
3075 return CC_ZSCmode;
3076 else
3077 return CC_ZSmode;
3080 /* Convert an RTX_CODE to the set of flags needed to implement it.
3081 This assumes an integer comparison. */
3083 static unsigned int
3084 flags_from_code (enum rtx_code code)
3086 switch (code)
3088 case LT:
3089 case GE:
3090 return CC_FLAG_S;
3091 case GT:
3092 case LE:
3093 return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
3094 case GEU:
3095 case LTU:
3096 return CC_FLAG_C;
3097 case GTU:
3098 case LEU:
3099 return CC_FLAG_C | CC_FLAG_Z;
3100 case EQ:
3101 case NE:
3102 return CC_FLAG_Z;
3103 default:
3104 gcc_unreachable ();
3108 /* Return a CC_MODE of which both M1 and M2 are subsets. */
3110 static enum machine_mode
3111 rx_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
3113 unsigned f;
3115 /* Early out for identical modes. */
3116 if (m1 == m2)
3117 return m1;
3119 /* There's no valid combination for FP vs non-FP. */
3120 f = flags_from_mode (m1) | flags_from_mode (m2);
3121 if (f & CC_FLAG_FP)
3122 return VOIDmode;
3124 /* Otherwise, see what mode can implement all the flags. */
3125 return mode_from_flags (f);
3128 /* Return the minimal CC mode needed to implement (CMP_CODE X Y). */
3130 enum machine_mode
3131 rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
3133 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3134 return CC_Fmode;
3136 if (y != const0_rtx)
3137 return CCmode;
3139 return mode_from_flags (flags_from_code (cmp_code));
3142 /* Split the conditional branch. Emit (COMPARE C1 C2) into CC_REG with
3143 CC_MODE, and use that in branches based on that compare. */
3145 void
3146 rx_split_cbranch (enum machine_mode cc_mode, enum rtx_code cmp1,
3147 rtx c1, rtx c2, rtx label)
3149 rtx flags, x;
3151 flags = gen_rtx_REG (cc_mode, CC_REG);
3152 x = gen_rtx_COMPARE (cc_mode, c1, c2);
3153 x = gen_rtx_SET (VOIDmode, flags, x);
3154 emit_insn (x);
3156 x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
3157 x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
3158 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
3159 emit_jump_insn (x);
3162 /* A helper function for matching parallels that set the flags. */
3164 bool
3165 rx_match_ccmode (rtx insn, enum machine_mode cc_mode)
3167 rtx op1, flags;
3168 enum machine_mode flags_mode;
3170 gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
3172 op1 = XVECEXP (PATTERN (insn), 0, 1);
3173 gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
3175 flags = SET_DEST (op1);
3176 flags_mode = GET_MODE (flags);
3178 if (GET_MODE (SET_SRC (op1)) != flags_mode)
3179 return false;
3180 if (GET_MODE_CLASS (flags_mode) != MODE_CC)
3181 return false;
3183 /* Ensure that the mode of FLAGS is compatible with CC_MODE. */
3184 if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
3185 return false;
3187 return true;
3191 rx_align_for_label (rtx lab, int uses_threshold)
3193 /* This is a simple heuristic to guess when an alignment would not be useful
3194 because the delay due to the inserted NOPs would be greater than the delay
3195 due to the misaligned branch. If uses_threshold is zero then the alignment
3196 is always useful. */
3197 if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
3198 return 0;
3200 if (optimize_size)
3201 return 0;
3202 if (rx_cpu_type == RX100 || rx_cpu_type == RX200)
3203 return 2;
3204 return 2;
3207 static int
3208 rx_max_skip_for_label (rtx lab)
3210 int opsize;
3211 rtx op;
3213 if (optimize_size)
3214 return 0;
3216 if (lab == NULL_RTX)
3217 return 0;
3219 op = lab;
3222 op = next_nonnote_nondebug_insn (op);
3224 while (op && (LABEL_P (op)
3225 || (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
3226 if (!op)
3227 return 0;
3229 opsize = get_attr_length (op);
3230 if (opsize >= 0 && opsize < 8)
3231 return opsize - 1;
3232 return 0;
3235 /* Compute the real length of the extending load-and-op instructions. */
3238 rx_adjust_insn_length (rtx insn, int current_length)
3240 rtx extend, mem, offset;
3241 bool zero;
3242 int factor;
3244 if (!INSN_P (insn))
3245 return current_length;
3247 switch (INSN_CODE (insn))
3249 default:
3250 return current_length;
3252 case CODE_FOR_plussi3_zero_extendhi:
3253 case CODE_FOR_andsi3_zero_extendhi:
3254 case CODE_FOR_iorsi3_zero_extendhi:
3255 case CODE_FOR_xorsi3_zero_extendhi:
3256 case CODE_FOR_divsi3_zero_extendhi:
3257 case CODE_FOR_udivsi3_zero_extendhi:
3258 case CODE_FOR_minussi3_zero_extendhi:
3259 case CODE_FOR_smaxsi3_zero_extendhi:
3260 case CODE_FOR_sminsi3_zero_extendhi:
3261 case CODE_FOR_multsi3_zero_extendhi:
3262 case CODE_FOR_comparesi3_zero_extendhi:
3263 zero = true;
3264 factor = 2;
3265 break;
3267 case CODE_FOR_plussi3_sign_extendhi:
3268 case CODE_FOR_andsi3_sign_extendhi:
3269 case CODE_FOR_iorsi3_sign_extendhi:
3270 case CODE_FOR_xorsi3_sign_extendhi:
3271 case CODE_FOR_divsi3_sign_extendhi:
3272 case CODE_FOR_udivsi3_sign_extendhi:
3273 case CODE_FOR_minussi3_sign_extendhi:
3274 case CODE_FOR_smaxsi3_sign_extendhi:
3275 case CODE_FOR_sminsi3_sign_extendhi:
3276 case CODE_FOR_multsi3_sign_extendhi:
3277 case CODE_FOR_comparesi3_sign_extendhi:
3278 zero = false;
3279 factor = 2;
3280 break;
3282 case CODE_FOR_plussi3_zero_extendqi:
3283 case CODE_FOR_andsi3_zero_extendqi:
3284 case CODE_FOR_iorsi3_zero_extendqi:
3285 case CODE_FOR_xorsi3_zero_extendqi:
3286 case CODE_FOR_divsi3_zero_extendqi:
3287 case CODE_FOR_udivsi3_zero_extendqi:
3288 case CODE_FOR_minussi3_zero_extendqi:
3289 case CODE_FOR_smaxsi3_zero_extendqi:
3290 case CODE_FOR_sminsi3_zero_extendqi:
3291 case CODE_FOR_multsi3_zero_extendqi:
3292 case CODE_FOR_comparesi3_zero_extendqi:
3293 zero = true;
3294 factor = 1;
3295 break;
3297 case CODE_FOR_plussi3_sign_extendqi:
3298 case CODE_FOR_andsi3_sign_extendqi:
3299 case CODE_FOR_iorsi3_sign_extendqi:
3300 case CODE_FOR_xorsi3_sign_extendqi:
3301 case CODE_FOR_divsi3_sign_extendqi:
3302 case CODE_FOR_udivsi3_sign_extendqi:
3303 case CODE_FOR_minussi3_sign_extendqi:
3304 case CODE_FOR_smaxsi3_sign_extendqi:
3305 case CODE_FOR_sminsi3_sign_extendqi:
3306 case CODE_FOR_multsi3_sign_extendqi:
3307 case CODE_FOR_comparesi3_sign_extendqi:
3308 zero = false;
3309 factor = 1;
3310 break;
3313 /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))). */
3314 extend = single_set (insn);
3315 gcc_assert (extend != NULL_RTX);
3317 extend = SET_SRC (extend);
3318 if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
3319 || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
3320 extend = XEXP (extend, 0);
3321 else
3322 extend = XEXP (extend, 1);
3324 gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
3325 || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));
3327 mem = XEXP (extend, 0);
3328 gcc_checking_assert (MEM_P (mem));
3329 if (REG_P (XEXP (mem, 0)))
3330 return (zero && factor == 1) ? 2 : 3;
3332 /* We are expecting: (MEM (PLUS (REG) (CONST_INT))). */
3333 gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
3334 gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));
3336 offset = XEXP (XEXP (mem, 0), 1);
3337 gcc_checking_assert (GET_CODE (offset) == CONST_INT);
3339 if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
3340 return (zero && factor == 1) ? 3 : 4;
3342 return (zero && factor == 1) ? 4 : 5;
/* Implements TARGET_NARROW_VOLATILE_BITFIELD: volatile bitfield
   accesses on RX always use the declared field width.  */

static bool
rx_narrow_volatile_bitfield (void)
{
  return true;
}
3351 static bool
3352 rx_ok_to_inline (tree caller, tree callee)
3354 /* Do not inline functions with local variables
3355 into a naked CALLER - naked function have no stack frame and
3356 locals need a frame in order to have somewhere to live.
3358 Unfortunately we have no way to determine the presence of
3359 local variables in CALLEE, so we have to be cautious and
3360 assume that there might be some there.
3362 We do allow inlining when CALLEE has the "inline" type
3363 modifier or the "always_inline" or "gnu_inline" attributes. */
3364 return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
3365 || DECL_DECLARED_INLINE_P (callee)
3366 || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
3367 || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
3370 static bool
3371 rx_enable_lra (void)
3373 return TARGET_ENABLE_LRA;
3377 #undef TARGET_NARROW_VOLATILE_BITFIELD
3378 #define TARGET_NARROW_VOLATILE_BITFIELD rx_narrow_volatile_bitfield
3380 #undef TARGET_CAN_INLINE_P
3381 #define TARGET_CAN_INLINE_P rx_ok_to_inline
3383 #undef TARGET_ASM_JUMP_ALIGN_MAX_SKIP
3384 #define TARGET_ASM_JUMP_ALIGN_MAX_SKIP rx_max_skip_for_label
3385 #undef TARGET_ASM_LOOP_ALIGN_MAX_SKIP
3386 #define TARGET_ASM_LOOP_ALIGN_MAX_SKIP rx_max_skip_for_label
3387 #undef TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
3388 #define TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP rx_max_skip_for_label
3389 #undef TARGET_ASM_LABEL_ALIGN_MAX_SKIP
3390 #define TARGET_ASM_LABEL_ALIGN_MAX_SKIP rx_max_skip_for_label
3392 #undef TARGET_FUNCTION_VALUE
3393 #define TARGET_FUNCTION_VALUE rx_function_value
3395 #undef TARGET_RETURN_IN_MSB
3396 #define TARGET_RETURN_IN_MSB rx_return_in_msb
3398 #undef TARGET_IN_SMALL_DATA_P
3399 #define TARGET_IN_SMALL_DATA_P rx_in_small_data
3401 #undef TARGET_RETURN_IN_MEMORY
3402 #define TARGET_RETURN_IN_MEMORY rx_return_in_memory
3404 #undef TARGET_HAVE_SRODATA_SECTION
3405 #define TARGET_HAVE_SRODATA_SECTION true
3407 #undef TARGET_ASM_SELECT_RTX_SECTION
3408 #define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section
3410 #undef TARGET_ASM_SELECT_SECTION
3411 #define TARGET_ASM_SELECT_SECTION rx_select_section
3413 #undef TARGET_INIT_BUILTINS
3414 #define TARGET_INIT_BUILTINS rx_init_builtins
3416 #undef TARGET_BUILTIN_DECL
3417 #define TARGET_BUILTIN_DECL rx_builtin_decl
3419 #undef TARGET_EXPAND_BUILTIN
3420 #define TARGET_EXPAND_BUILTIN rx_expand_builtin
3422 #undef TARGET_ASM_CONSTRUCTOR
3423 #define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
3425 #undef TARGET_ASM_DESTRUCTOR
3426 #define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor
3428 #undef TARGET_STRUCT_VALUE_RTX
3429 #define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx
3431 #undef TARGET_ATTRIBUTE_TABLE
3432 #define TARGET_ATTRIBUTE_TABLE rx_attribute_table
3434 #undef TARGET_ASM_FILE_START
3435 #define TARGET_ASM_FILE_START rx_file_start
3437 #undef TARGET_MS_BITFIELD_LAYOUT_P
3438 #define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout
3440 #undef TARGET_LEGITIMATE_ADDRESS_P
3441 #define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address
3443 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
3444 #define TARGET_MODE_DEPENDENT_ADDRESS_P rx_mode_dependent_address_p
3446 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
3447 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args
3449 #undef TARGET_ASM_FUNCTION_PROLOGUE
3450 #define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue
3452 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
3453 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable
3455 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
3456 #define TARGET_FUNCTION_OK_FOR_SIBCALL rx_function_ok_for_sibcall
3458 #undef TARGET_FUNCTION_ARG
3459 #define TARGET_FUNCTION_ARG rx_function_arg
3461 #undef TARGET_FUNCTION_ARG_ADVANCE
3462 #define TARGET_FUNCTION_ARG_ADVANCE rx_function_arg_advance
3464 #undef TARGET_FUNCTION_ARG_BOUNDARY
3465 #define TARGET_FUNCTION_ARG_BOUNDARY rx_function_arg_boundary
3467 #undef TARGET_SET_CURRENT_FUNCTION
3468 #define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
3470 #undef TARGET_ASM_INTEGER
3471 #define TARGET_ASM_INTEGER rx_assemble_integer
3473 #undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
3474 #define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
3476 #undef TARGET_MAX_ANCHOR_OFFSET
3477 #define TARGET_MAX_ANCHOR_OFFSET 32
3479 #undef TARGET_ADDRESS_COST
3480 #define TARGET_ADDRESS_COST rx_address_cost
3482 #undef TARGET_CAN_ELIMINATE
3483 #define TARGET_CAN_ELIMINATE rx_can_eliminate
3485 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3486 #define TARGET_CONDITIONAL_REGISTER_USAGE rx_conditional_register_usage
3488 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3489 #define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template
3491 #undef TARGET_TRAMPOLINE_INIT
3492 #define TARGET_TRAMPOLINE_INIT rx_trampoline_init
3494 #undef TARGET_PRINT_OPERAND
3495 #define TARGET_PRINT_OPERAND rx_print_operand
3497 #undef TARGET_PRINT_OPERAND_ADDRESS
3498 #define TARGET_PRINT_OPERAND_ADDRESS rx_print_operand_address
3500 #undef TARGET_CC_MODES_COMPATIBLE
3501 #define TARGET_CC_MODES_COMPATIBLE rx_cc_modes_compatible
3503 #undef TARGET_MEMORY_MOVE_COST
3504 #define TARGET_MEMORY_MOVE_COST rx_memory_move_cost
3506 #undef TARGET_OPTION_OVERRIDE
3507 #define TARGET_OPTION_OVERRIDE rx_option_override
3509 #undef TARGET_PROMOTE_FUNCTION_MODE
3510 #define TARGET_PROMOTE_FUNCTION_MODE rx_promote_function_mode
3512 #undef TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
3513 #define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE rx_override_options_after_change
3515 #undef TARGET_FLAGS_REGNUM
3516 #define TARGET_FLAGS_REGNUM CC_REG
3518 #undef TARGET_LEGITIMATE_CONSTANT_P
3519 #define TARGET_LEGITIMATE_CONSTANT_P rx_is_legitimate_constant
3521 #undef TARGET_LEGITIMIZE_ADDRESS
3522 #define TARGET_LEGITIMIZE_ADDRESS rx_legitimize_address
3524 #undef TARGET_WARN_FUNC_RETURN
3525 #define TARGET_WARN_FUNC_RETURN rx_warn_func_return
3527 #undef TARGET_LRA_P
3528 #define TARGET_LRA_P rx_enable_lra
3530 struct gcc_target targetm = TARGET_INITIALIZER;
3532 #include "gt-rx.h"