/* Imported from official-gcc.git: gcc/config/rx/rx.c
   (blob dacf990fad7728fd3525e73a50e62fb5d3c52b06, 2014-12-19, Andrew MacLeod).  */
1 /* Subroutines used for code generation on Renesas RX processors.
2 Copyright (C) 2008-2014 Free Software Foundation, Inc.
3 Contributed by Red Hat.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* To Do:
23 * Re-enable memory-to-memory copies and fix up reload. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "tree.h"
30 #include "varasm.h"
31 #include "stor-layout.h"
32 #include "calls.h"
33 #include "rtl.h"
34 #include "regs.h"
35 #include "hard-reg-set.h"
36 #include "insn-config.h"
37 #include "conditions.h"
38 #include "output.h"
39 #include "insn-attr.h"
40 #include "flags.h"
41 #include "hashtab.h"
42 #include "hash-set.h"
43 #include "vec.h"
44 #include "machmode.h"
45 #include "input.h"
46 #include "function.h"
47 #include "expr.h"
48 #include "insn-codes.h"
49 #include "optabs.h"
50 #include "libfuncs.h"
51 #include "recog.h"
52 #include "diagnostic-core.h"
53 #include "toplev.h"
54 #include "reload.h"
55 #include "dominance.h"
56 #include "cfg.h"
57 #include "cfgrtl.h"
58 #include "cfganal.h"
59 #include "lcm.h"
60 #include "cfgbuild.h"
61 #include "cfgcleanup.h"
62 #include "predict.h"
63 #include "basic-block.h"
64 #include "df.h"
65 #include "ggc.h"
66 #include "tm_p.h"
67 #include "debug.h"
68 #include "target.h"
69 #include "target-def.h"
70 #include "langhooks.h"
71 #include "opts.h"
72 #include "hash-map.h"
73 #include "is-a.h"
74 #include "plugin-api.h"
75 #include "ipa-ref.h"
76 #include "cgraph.h"
77 #include "builtins.h"
79 static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
80 static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
81 static unsigned int rx_num_interrupt_regs;
83 static unsigned int
84 rx_gp_base_regnum (void)
86 if (rx_gp_base_regnum_val == INVALID_REGNUM)
87 gcc_unreachable ();
88 return rx_gp_base_regnum_val;
91 static unsigned int
92 rx_pid_base_regnum (void)
94 if (rx_pid_base_regnum_val == INVALID_REGNUM)
95 gcc_unreachable ();
96 return rx_pid_base_regnum_val;
99 /* Find a SYMBOL_REF in a "standard" MEM address and return its decl. */
101 static tree
102 rx_decl_for_addr (rtx op)
104 if (GET_CODE (op) == MEM)
105 op = XEXP (op, 0);
106 if (GET_CODE (op) == CONST)
107 op = XEXP (op, 0);
108 while (GET_CODE (op) == PLUS)
109 op = XEXP (op, 0);
110 if (GET_CODE (op) == SYMBOL_REF)
111 return SYMBOL_REF_DECL (op);
112 return NULL_TREE;
115 static void rx_print_operand (FILE *, rtx, int);
117 #define CC_FLAG_S (1 << 0)
118 #define CC_FLAG_Z (1 << 1)
119 #define CC_FLAG_O (1 << 2)
120 #define CC_FLAG_C (1 << 3)
121 #define CC_FLAG_FP (1 << 4) /* Fake, to differentiate CC_Fmode. */
123 static unsigned int flags_from_mode (machine_mode mode);
124 static unsigned int flags_from_code (enum rtx_code code);
/* Classification of an operand with respect to the PID data area.  */

enum pid_type
{
  PID_NOT_PID = 0,	/* The object is not in the PID data area.  */
  PID_ENCODED,		/* The object is in the PID data area.  */
  PID_UNENCODED		/* The object will be placed in the PID data area,
			   but it has not been placed there yet.  */
};
135 static enum pid_type
136 rx_pid_data_operand (rtx op)
138 tree op_decl;
140 if (!TARGET_PID)
141 return PID_NOT_PID;
143 if (GET_CODE (op) == PLUS
144 && GET_CODE (XEXP (op, 0)) == REG
145 && GET_CODE (XEXP (op, 1)) == CONST
146 && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
147 return PID_ENCODED;
149 op_decl = rx_decl_for_addr (op);
151 if (op_decl)
153 if (TREE_READONLY (op_decl))
154 return PID_UNENCODED;
156 else
158 /* Sigh, some special cases. */
159 if (GET_CODE (op) == SYMBOL_REF
160 || GET_CODE (op) == LABEL_REF)
161 return PID_UNENCODED;
164 return PID_NOT_PID;
167 static rtx
168 rx_legitimize_address (rtx x,
169 rtx oldx ATTRIBUTE_UNUSED,
170 machine_mode mode ATTRIBUTE_UNUSED)
172 if (rx_pid_data_operand (x) == PID_UNENCODED)
174 rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
175 return rv;
178 if (GET_CODE (x) == PLUS
179 && GET_CODE (XEXP (x, 0)) == PLUS
180 && REG_P (XEXP (XEXP (x, 0), 0))
181 && REG_P (XEXP (x, 1)))
182 return force_reg (SImode, x);
184 return x;
187 /* Return true if OP is a reference to an object in a small data area. */
189 static bool
190 rx_small_data_operand (rtx op)
192 if (rx_small_data_limit == 0)
193 return false;
195 if (GET_CODE (op) == SYMBOL_REF)
196 return SYMBOL_REF_SMALL_P (op);
198 return false;
201 static bool
202 rx_is_legitimate_address (machine_mode mode, rtx x,
203 bool strict ATTRIBUTE_UNUSED)
205 if (RTX_OK_FOR_BASE (x, strict))
206 /* Register Indirect. */
207 return true;
209 if ((GET_MODE_SIZE (mode) == 4
210 || GET_MODE_SIZE (mode) == 2
211 || GET_MODE_SIZE (mode) == 1)
212 && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
213 /* Pre-decrement Register Indirect or
214 Post-increment Register Indirect. */
215 return RTX_OK_FOR_BASE (XEXP (x, 0), strict);
217 switch (rx_pid_data_operand (x))
219 case PID_UNENCODED:
220 return false;
221 case PID_ENCODED:
222 return true;
223 default:
224 break;
227 if (GET_CODE (x) == PLUS)
229 rtx arg1 = XEXP (x, 0);
230 rtx arg2 = XEXP (x, 1);
231 rtx index = NULL_RTX;
233 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
234 index = arg2;
235 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
236 index = arg1;
237 else
238 return false;
240 switch (GET_CODE (index))
242 case CONST_INT:
244 /* Register Relative: REG + INT.
245 Only positive, mode-aligned, mode-sized
246 displacements are allowed. */
247 HOST_WIDE_INT val = INTVAL (index);
248 int factor;
250 if (val < 0)
251 return false;
253 switch (GET_MODE_SIZE (mode))
255 default:
256 case 4: factor = 4; break;
257 case 2: factor = 2; break;
258 case 1: factor = 1; break;
261 if (val > (65535 * factor))
262 return false;
263 return (val % factor) == 0;
266 case REG:
267 /* Unscaled Indexed Register Indirect: REG + REG
268 Size has to be "QI", REG has to be valid. */
269 return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);
271 case MULT:
273 /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
274 Factor has to equal the mode size, REG has to be valid. */
275 rtx factor;
277 factor = XEXP (index, 1);
278 index = XEXP (index, 0);
280 return REG_P (index)
281 && RTX_OK_FOR_BASE (index, strict)
282 && CONST_INT_P (factor)
283 && GET_MODE_SIZE (mode) == INTVAL (factor);
286 default:
287 return false;
291 /* Small data area accesses turn into register relative offsets. */
292 return rx_small_data_operand (x);
295 /* Returns TRUE for simple memory addreses, ie ones
296 that do not involve register indirect addressing
297 or pre/post increment/decrement. */
299 bool
300 rx_is_restricted_memory_address (rtx mem, machine_mode mode)
302 if (! rx_is_legitimate_address
303 (mode, mem, reload_in_progress || reload_completed))
304 return false;
306 switch (GET_CODE (mem))
308 case REG:
309 /* Simple memory addresses are OK. */
310 return true;
312 case PRE_DEC:
313 case POST_INC:
314 return false;
316 case PLUS:
318 rtx base, index;
320 /* Only allow REG+INT addressing. */
321 base = XEXP (mem, 0);
322 index = XEXP (mem, 1);
324 if (! RX_REG_P (base) || ! CONST_INT_P (index))
325 return false;
327 return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
330 case SYMBOL_REF:
331 /* Can happen when small data is being supported.
332 Assume that it will be resolved into GP+INT. */
333 return true;
335 default:
336 gcc_unreachable ();
340 /* Implement TARGET_MODE_DEPENDENT_ADDRESS_P. */
342 static bool
343 rx_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
345 if (GET_CODE (addr) == CONST)
346 addr = XEXP (addr, 0);
348 switch (GET_CODE (addr))
350 /* --REG and REG++ only work in SImode. */
351 case PRE_DEC:
352 case POST_INC:
353 return true;
355 case MINUS:
356 case PLUS:
357 if (! REG_P (XEXP (addr, 0)))
358 return true;
360 addr = XEXP (addr, 1);
362 switch (GET_CODE (addr))
364 case REG:
365 /* REG+REG only works in SImode. */
366 return true;
368 case CONST_INT:
369 /* REG+INT is only mode independent if INT is a
370 multiple of 4, positive and will fit into 16-bits. */
371 if (((INTVAL (addr) & 3) == 0)
372 && IN_RANGE (INTVAL (addr), 4, 0xfffc))
373 return false;
374 return true;
376 case SYMBOL_REF:
377 case LABEL_REF:
378 return true;
380 case MULT:
381 gcc_assert (REG_P (XEXP (addr, 0)));
382 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
383 /* REG+REG*SCALE is always mode dependent. */
384 return true;
386 default:
387 /* Not recognized, so treat as mode dependent. */
388 return true;
391 case CONST_INT:
392 case SYMBOL_REF:
393 case LABEL_REF:
394 case REG:
395 /* These are all mode independent. */
396 return false;
398 default:
399 /* Everything else is unrecognized,
400 so treat as mode dependent. */
401 return true;
405 /* A C compound statement to output to stdio stream FILE the
406 assembler syntax for an instruction operand that is a memory
407 reference whose address is ADDR. */
409 static void
410 rx_print_operand_address (FILE * file, rtx addr)
412 switch (GET_CODE (addr))
414 case REG:
415 fprintf (file, "[");
416 rx_print_operand (file, addr, 0);
417 fprintf (file, "]");
418 break;
420 case PRE_DEC:
421 fprintf (file, "[-");
422 rx_print_operand (file, XEXP (addr, 0), 0);
423 fprintf (file, "]");
424 break;
426 case POST_INC:
427 fprintf (file, "[");
428 rx_print_operand (file, XEXP (addr, 0), 0);
429 fprintf (file, "+]");
430 break;
432 case PLUS:
434 rtx arg1 = XEXP (addr, 0);
435 rtx arg2 = XEXP (addr, 1);
436 rtx base, index;
438 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
439 base = arg1, index = arg2;
440 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
441 base = arg2, index = arg1;
442 else
444 rx_print_operand (file, arg1, 0);
445 fprintf (file, " + ");
446 rx_print_operand (file, arg2, 0);
447 break;
450 if (REG_P (index) || GET_CODE (index) == MULT)
452 fprintf (file, "[");
453 rx_print_operand (file, index, 'A');
454 fprintf (file, ",");
456 else /* GET_CODE (index) == CONST_INT */
458 rx_print_operand (file, index, 'A');
459 fprintf (file, "[");
461 rx_print_operand (file, base, 0);
462 fprintf (file, "]");
463 break;
466 case CONST:
467 if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
469 addr = XEXP (addr, 0);
470 gcc_assert (XINT (addr, 1) == UNSPEC_CONST);
472 /* FIXME: Putting this case label here is an appalling abuse of the C language. */
473 case UNSPEC:
474 addr = XVECEXP (addr, 0, 0);
475 gcc_assert (CONST_INT_P (addr));
477 /* Fall through. */
478 case LABEL_REF:
479 case SYMBOL_REF:
480 fprintf (file, "#");
481 /* Fall through. */
482 default:
483 output_addr_const (file, addr);
484 break;
488 static void
489 rx_print_integer (FILE * file, HOST_WIDE_INT val)
491 if (IN_RANGE (val, -64, 64))
492 fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
493 else
494 fprintf (file,
495 TARGET_AS100_SYNTAX
496 ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
497 val);
500 static bool
501 rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
503 const char * op = integer_asm_op (size, is_aligned);
505 if (! CONST_INT_P (x))
506 return default_assemble_integer (x, size, is_aligned);
508 if (op == NULL)
509 return false;
510 fputs (op, asm_out_file);
512 rx_print_integer (asm_out_file, INTVAL (x));
513 fputc ('\n', asm_out_file);
514 return true;
518 /* Handles the insertion of a single operand into the assembler output.
519 The %<letter> directives supported are:
521 %A Print an operand without a leading # character.
522 %B Print an integer comparison name.
523 %C Print a control register name.
524 %F Print a condition code flag name.
525 %G Register used for small-data-area addressing
526 %H Print high part of a DImode register, integer or address.
527 %L Print low part of a DImode register, integer or address.
528 %N Print the negation of the immediate value.
529 %P Register used for PID addressing
530 %Q If the operand is a MEM, then correctly generate
531 register indirect or register relative addressing.
532 %R Like %Q but for zero-extending loads. */
534 static void
535 rx_print_operand (FILE * file, rtx op, int letter)
537 bool unsigned_load = false;
538 bool print_hash = true;
540 if (letter == 'A'
541 && ((GET_CODE (op) == CONST
542 && GET_CODE (XEXP (op, 0)) == UNSPEC)
543 || GET_CODE (op) == UNSPEC))
545 print_hash = false;
546 letter = 0;
549 switch (letter)
551 case 'A':
552 /* Print an operand without a leading #. */
553 if (MEM_P (op))
554 op = XEXP (op, 0);
556 switch (GET_CODE (op))
558 case LABEL_REF:
559 case SYMBOL_REF:
560 output_addr_const (file, op);
561 break;
562 case CONST_INT:
563 fprintf (file, "%ld", (long) INTVAL (op));
564 break;
565 default:
566 rx_print_operand (file, op, 0);
567 break;
569 break;
571 case 'B':
573 enum rtx_code code = GET_CODE (op);
574 machine_mode mode = GET_MODE (XEXP (op, 0));
575 const char *ret;
577 if (mode == CC_Fmode)
579 /* C flag is undefined, and O flag carries unordered. None of the
580 branch combinations that include O use it helpfully. */
581 switch (code)
583 case ORDERED:
584 ret = "no";
585 break;
586 case UNORDERED:
587 ret = "o";
588 break;
589 case LT:
590 ret = "n";
591 break;
592 case GE:
593 ret = "pz";
594 break;
595 case EQ:
596 ret = "eq";
597 break;
598 case NE:
599 ret = "ne";
600 break;
601 default:
602 gcc_unreachable ();
605 else
607 unsigned int flags = flags_from_mode (mode);
609 switch (code)
611 case LT:
612 ret = (flags & CC_FLAG_O ? "lt" : "n");
613 break;
614 case GE:
615 ret = (flags & CC_FLAG_O ? "ge" : "pz");
616 break;
617 case GT:
618 ret = "gt";
619 break;
620 case LE:
621 ret = "le";
622 break;
623 case GEU:
624 ret = "geu";
625 break;
626 case LTU:
627 ret = "ltu";
628 break;
629 case GTU:
630 ret = "gtu";
631 break;
632 case LEU:
633 ret = "leu";
634 break;
635 case EQ:
636 ret = "eq";
637 break;
638 case NE:
639 ret = "ne";
640 break;
641 default:
642 gcc_unreachable ();
644 gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
646 fputs (ret, file);
647 break;
650 case 'C':
651 gcc_assert (CONST_INT_P (op));
652 switch (INTVAL (op))
654 case 0: fprintf (file, "psw"); break;
655 case 2: fprintf (file, "usp"); break;
656 case 3: fprintf (file, "fpsw"); break;
657 case 4: fprintf (file, "cpen"); break;
658 case 8: fprintf (file, "bpsw"); break;
659 case 9: fprintf (file, "bpc"); break;
660 case 0xa: fprintf (file, "isp"); break;
661 case 0xb: fprintf (file, "fintv"); break;
662 case 0xc: fprintf (file, "intb"); break;
663 default:
664 warning (0, "unrecognized control register number: %d - using 'psw'",
665 (int) INTVAL (op));
666 fprintf (file, "psw");
667 break;
669 break;
671 case 'F':
672 gcc_assert (CONST_INT_P (op));
673 switch (INTVAL (op))
675 case 0: case 'c': case 'C': fprintf (file, "C"); break;
676 case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
677 case 2: case 's': case 'S': fprintf (file, "S"); break;
678 case 3: case 'o': case 'O': fprintf (file, "O"); break;
679 case 8: case 'i': case 'I': fprintf (file, "I"); break;
680 case 9: case 'u': case 'U': fprintf (file, "U"); break;
681 default:
682 gcc_unreachable ();
684 break;
686 case 'G':
687 fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
688 break;
690 case 'H':
691 switch (GET_CODE (op))
693 case REG:
694 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
695 break;
696 case CONST_INT:
698 HOST_WIDE_INT v = INTVAL (op);
700 fprintf (file, "#");
701 /* Trickery to avoid problems with shifting 32 bits at a time. */
702 v = v >> 16;
703 v = v >> 16;
704 rx_print_integer (file, v);
705 break;
707 case CONST_DOUBLE:
708 fprintf (file, "#");
709 rx_print_integer (file, CONST_DOUBLE_HIGH (op));
710 break;
711 case MEM:
712 if (! WORDS_BIG_ENDIAN)
713 op = adjust_address (op, SImode, 4);
714 output_address (XEXP (op, 0));
715 break;
716 default:
717 gcc_unreachable ();
719 break;
721 case 'L':
722 switch (GET_CODE (op))
724 case REG:
725 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
726 break;
727 case CONST_INT:
728 fprintf (file, "#");
729 rx_print_integer (file, INTVAL (op) & 0xffffffff);
730 break;
731 case CONST_DOUBLE:
732 fprintf (file, "#");
733 rx_print_integer (file, CONST_DOUBLE_LOW (op));
734 break;
735 case MEM:
736 if (WORDS_BIG_ENDIAN)
737 op = adjust_address (op, SImode, 4);
738 output_address (XEXP (op, 0));
739 break;
740 default:
741 gcc_unreachable ();
743 break;
745 case 'N':
746 gcc_assert (CONST_INT_P (op));
747 fprintf (file, "#");
748 rx_print_integer (file, - INTVAL (op));
749 break;
751 case 'P':
752 fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
753 break;
755 case 'R':
756 gcc_assert (GET_MODE_SIZE (GET_MODE (op)) <= 4);
757 unsigned_load = true;
758 /* Fall through. */
759 case 'Q':
760 if (MEM_P (op))
762 HOST_WIDE_INT offset;
763 rtx mem = op;
765 op = XEXP (op, 0);
767 if (REG_P (op))
768 offset = 0;
769 else if (GET_CODE (op) == PLUS)
771 rtx displacement;
773 if (REG_P (XEXP (op, 0)))
775 displacement = XEXP (op, 1);
776 op = XEXP (op, 0);
778 else
780 displacement = XEXP (op, 0);
781 op = XEXP (op, 1);
782 gcc_assert (REG_P (op));
785 gcc_assert (CONST_INT_P (displacement));
786 offset = INTVAL (displacement);
787 gcc_assert (offset >= 0);
789 fprintf (file, "%ld", offset);
791 else
792 gcc_unreachable ();
794 fprintf (file, "[");
795 rx_print_operand (file, op, 0);
796 fprintf (file, "].");
798 switch (GET_MODE_SIZE (GET_MODE (mem)))
800 case 1:
801 gcc_assert (offset <= 65535 * 1);
802 fprintf (file, unsigned_load ? "UB" : "B");
803 break;
804 case 2:
805 gcc_assert (offset % 2 == 0);
806 gcc_assert (offset <= 65535 * 2);
807 fprintf (file, unsigned_load ? "UW" : "W");
808 break;
809 case 4:
810 gcc_assert (offset % 4 == 0);
811 gcc_assert (offset <= 65535 * 4);
812 fprintf (file, "L");
813 break;
814 default:
815 gcc_unreachable ();
817 break;
820 /* Fall through. */
822 default:
823 if (GET_CODE (op) == CONST
824 && GET_CODE (XEXP (op, 0)) == UNSPEC)
825 op = XEXP (op, 0);
826 else if (GET_CODE (op) == CONST
827 && GET_CODE (XEXP (op, 0)) == PLUS
828 && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
829 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
831 if (print_hash)
832 fprintf (file, "#");
833 fprintf (file, "(");
834 rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
835 fprintf (file, " + ");
836 output_addr_const (file, XEXP (XEXP (op, 0), 1));
837 fprintf (file, ")");
838 return;
841 switch (GET_CODE (op))
843 case MULT:
844 /* Should be the scaled part of an
845 indexed register indirect address. */
847 rtx base = XEXP (op, 0);
848 rtx index = XEXP (op, 1);
850 /* Check for a swaped index register and scaling factor.
851 Not sure if this can happen, but be prepared to handle it. */
852 if (CONST_INT_P (base) && REG_P (index))
854 rtx tmp = base;
855 base = index;
856 index = tmp;
859 gcc_assert (REG_P (base));
860 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
861 gcc_assert (CONST_INT_P (index));
862 /* Do not try to verify the value of the scalar as it is based
863 on the mode of the MEM not the mode of the MULT. (Which
864 will always be SImode). */
865 fprintf (file, "%s", reg_names [REGNO (base)]);
866 break;
869 case MEM:
870 output_address (XEXP (op, 0));
871 break;
873 case PLUS:
874 output_address (op);
875 break;
877 case REG:
878 gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
879 fprintf (file, "%s", reg_names [REGNO (op)]);
880 break;
882 case SUBREG:
883 gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
884 fprintf (file, "%s", reg_names [subreg_regno (op)]);
885 break;
887 /* This will only be single precision.... */
888 case CONST_DOUBLE:
890 unsigned long val;
891 REAL_VALUE_TYPE rv;
893 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
894 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
895 if (print_hash)
896 fprintf (file, "#");
897 fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
898 break;
901 case CONST_INT:
902 if (print_hash)
903 fprintf (file, "#");
904 rx_print_integer (file, INTVAL (op));
905 break;
907 case UNSPEC:
908 switch (XINT (op, 1))
910 case UNSPEC_PID_ADDR:
912 rtx sym, add;
914 if (print_hash)
915 fprintf (file, "#");
916 sym = XVECEXP (op, 0, 0);
917 add = NULL_RTX;
918 fprintf (file, "(");
919 if (GET_CODE (sym) == PLUS)
921 add = XEXP (sym, 1);
922 sym = XEXP (sym, 0);
924 output_addr_const (file, sym);
925 if (add != NULL_RTX)
927 fprintf (file, "+");
928 output_addr_const (file, add);
930 fprintf (file, "-__pid_base");
931 fprintf (file, ")");
932 return;
935 /* Fall through */
937 case CONST:
938 case SYMBOL_REF:
939 case LABEL_REF:
940 case CODE_LABEL:
941 rx_print_operand_address (file, op);
942 break;
944 default:
945 gcc_unreachable ();
947 break;
951 /* Maybe convert an operand into its PID format. */
954 rx_maybe_pidify_operand (rtx op, int copy_to_reg)
956 if (rx_pid_data_operand (op) == PID_UNENCODED)
958 if (GET_CODE (op) == MEM)
960 rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
961 op = replace_equiv_address (op, a);
963 else
965 op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
968 if (copy_to_reg)
969 op = copy_to_mode_reg (GET_MODE (op), op);
971 return op;
974 /* Returns an assembler template for a move instruction. */
976 char *
977 rx_gen_move_template (rtx * operands, bool is_movu)
979 static char out_template [64];
980 const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
981 const char * src_template;
982 const char * dst_template;
983 rtx dest = operands[0];
984 rtx src = operands[1];
986 /* Decide which extension, if any, should be given to the move instruction. */
987 switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
989 case QImode:
990 /* The .B extension is not valid when
991 loading an immediate into a register. */
992 if (! REG_P (dest) || ! CONST_INT_P (src))
993 extension = ".B";
994 break;
995 case HImode:
996 if (! REG_P (dest) || ! CONST_INT_P (src))
997 /* The .W extension is not valid when
998 loading an immediate into a register. */
999 extension = ".W";
1000 break;
1001 case DFmode:
1002 case DImode:
1003 case SFmode:
1004 case SImode:
1005 extension = ".L";
1006 break;
1007 case VOIDmode:
1008 /* This mode is used by constants. */
1009 break;
1010 default:
1011 debug_rtx (src);
1012 gcc_unreachable ();
1015 if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
1017 gcc_assert (GET_MODE (src) != DImode);
1018 gcc_assert (GET_MODE (src) != DFmode);
1020 src_template = "(%A1 - __pid_base)[%P1]";
1022 else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
1024 gcc_assert (GET_MODE (src) != DImode);
1025 gcc_assert (GET_MODE (src) != DFmode);
1027 src_template = "%%gp(%A1)[%G1]";
1029 else
1030 src_template = "%1";
1032 if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
1034 gcc_assert (GET_MODE (dest) != DImode);
1035 gcc_assert (GET_MODE (dest) != DFmode);
1037 dst_template = "%%gp(%A0)[%G0]";
1039 else
1040 dst_template = "%0";
1042 if (GET_MODE (dest) == DImode || GET_MODE (dest) == DFmode)
1044 gcc_assert (! is_movu);
1046 if (REG_P (src) && REG_P (dest) && (REGNO (dest) == REGNO (src) + 1))
1047 sprintf (out_template, "mov.L\t%%H1, %%H0 ! mov.L\t%%1, %%0");
1048 else
1049 sprintf (out_template, "mov.L\t%%1, %%0 ! mov.L\t%%H1, %%H0");
1051 else
1052 sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
1053 extension, src_template, dst_template);
1054 return out_template;
/* Return VALUE rounded up to the next ALIGNMENT boundary.
   ALIGNMENT must be a power of two.  */

static inline unsigned int
rx_round_up (unsigned int value, unsigned int alignment)
{
  alignment -= 1;
  return (value + alignment) & (~ alignment);
}
1066 /* Return the number of bytes in the argument registers
1067 occupied by an argument of type TYPE and mode MODE. */
1069 static unsigned int
1070 rx_function_arg_size (machine_mode mode, const_tree type)
1072 unsigned int num_bytes;
1074 num_bytes = (mode == BLKmode)
1075 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1076 return rx_round_up (num_bytes, UNITS_PER_WORD);
1079 #define NUM_ARG_REGS 4
1080 #define MAX_NUM_ARG_BYTES (NUM_ARG_REGS * UNITS_PER_WORD)
1082 /* Return an RTL expression describing the register holding a function
1083 parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
1084 be passed on the stack. CUM describes the previous parameters to the
1085 function and NAMED is false if the parameter is part of a variable
1086 parameter list, or the last named parameter before the start of a
1087 variable parameter list. */
1089 static rtx
1090 rx_function_arg (cumulative_args_t cum, machine_mode mode,
1091 const_tree type, bool named)
1093 unsigned int next_reg;
1094 unsigned int bytes_so_far = *get_cumulative_args (cum);
1095 unsigned int size;
1096 unsigned int rounded_size;
1098 /* An exploded version of rx_function_arg_size. */
1099 size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1100 /* If the size is not known it cannot be passed in registers. */
1101 if (size < 1)
1102 return NULL_RTX;
1104 rounded_size = rx_round_up (size, UNITS_PER_WORD);
1106 /* Don't pass this arg via registers if there
1107 are insufficient registers to hold all of it. */
1108 if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
1109 return NULL_RTX;
1111 /* Unnamed arguments and the last named argument in a
1112 variadic function are always passed on the stack. */
1113 if (!named)
1114 return NULL_RTX;
1116 /* Structures must occupy an exact number of registers,
1117 otherwise they are passed on the stack. */
1118 if ((type == NULL || AGGREGATE_TYPE_P (type))
1119 && (size % UNITS_PER_WORD) != 0)
1120 return NULL_RTX;
1122 next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;
1124 return gen_rtx_REG (mode, next_reg);
1127 static void
1128 rx_function_arg_advance (cumulative_args_t cum, machine_mode mode,
1129 const_tree type, bool named ATTRIBUTE_UNUSED)
1131 *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
1134 static unsigned int
1135 rx_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
1136 const_tree type ATTRIBUTE_UNUSED)
1138 /* Older versions of the RX backend aligned all on-stack arguments
1139 to 32-bits. The RX C ABI however says that they should be
1140 aligned to their natural alignment. (See section 5.2.2 of the ABI). */
1141 if (TARGET_GCC_ABI)
1142 return STACK_BOUNDARY;
1144 if (type)
1146 if (DECL_P (type))
1147 return DECL_ALIGN (type);
1148 return TYPE_ALIGN (type);
1151 return PARM_BOUNDARY;
1154 /* Return an RTL describing where a function return value of type RET_TYPE
1155 is held. */
1157 static rtx
1158 rx_function_value (const_tree ret_type,
1159 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1160 bool outgoing ATTRIBUTE_UNUSED)
1162 machine_mode mode = TYPE_MODE (ret_type);
1164 /* RX ABI specifies that small integer types are
1165 promoted to int when returned by a function. */
1166 if (GET_MODE_SIZE (mode) > 0
1167 && GET_MODE_SIZE (mode) < 4
1168 && ! COMPLEX_MODE_P (mode)
1170 return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);
1172 return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
1175 /* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
1176 regard to function returns as does TARGET_FUNCTION_VALUE. */
1178 static machine_mode
1179 rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
1180 machine_mode mode,
1181 int * punsignedp ATTRIBUTE_UNUSED,
1182 const_tree funtype ATTRIBUTE_UNUSED,
1183 int for_return)
1185 if (for_return != 1
1186 || GET_MODE_SIZE (mode) >= 4
1187 || COMPLEX_MODE_P (mode)
1188 || GET_MODE_SIZE (mode) < 1)
1189 return mode;
1191 return SImode;
1194 static bool
1195 rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1197 HOST_WIDE_INT size;
1199 if (TYPE_MODE (type) != BLKmode
1200 && ! AGGREGATE_TYPE_P (type))
1201 return false;
1203 size = int_size_in_bytes (type);
1204 /* Large structs and those whose size is not an
1205 exact multiple of 4 are returned in memory. */
1206 return size < 1
1207 || size > 16
1208 || (size % UNITS_PER_WORD) != 0;
1211 static rtx
1212 rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
1213 int incoming ATTRIBUTE_UNUSED)
1215 return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
1218 static bool
1219 rx_return_in_msb (const_tree valtype)
1221 return TARGET_BIG_ENDIAN_DATA
1222 && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
1225 /* Returns true if the provided function has the specified attribute. */
1227 static inline bool
1228 has_func_attr (const_tree decl, const char * func_attr)
1230 if (decl == NULL_TREE)
1231 decl = current_function_decl;
1233 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
1236 /* Returns true if the provided function has the "fast_interrupt" attribute. */
1238 static inline bool
1239 is_fast_interrupt_func (const_tree decl)
1241 return has_func_attr (decl, "fast_interrupt");
1244 /* Returns true if the provided function has the "interrupt" attribute. */
1246 static inline bool
1247 is_interrupt_func (const_tree decl)
1249 return has_func_attr (decl, "interrupt");
1252 /* Returns true if the provided function has the "naked" attribute. */
1254 static inline bool
1255 is_naked_func (const_tree decl)
1257 return has_func_attr (decl, "naked");
1260 static bool use_fixed_regs = false;
1262 static void
1263 rx_conditional_register_usage (void)
1265 static bool using_fixed_regs = false;
1267 if (TARGET_PID)
1269 rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
1270 fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
1273 if (rx_small_data_limit > 0)
1275 if (TARGET_PID)
1276 rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
1277 else
1278 rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
1280 fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
1283 if (use_fixed_regs != using_fixed_regs)
1285 static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
1286 static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
1288 if (use_fixed_regs)
1290 unsigned int r;
1292 memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
1293 memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);
1295 /* This is for fast interrupt handlers. Any register in
1296 the range r10 to r13 (inclusive) that is currently
1297 marked as fixed is now a viable, call-used register. */
1298 for (r = 10; r <= 13; r++)
1299 if (fixed_regs[r])
1301 fixed_regs[r] = 0;
1302 call_used_regs[r] = 1;
1305 /* Mark r7 as fixed. This is just a hack to avoid
1306 altering the reg_alloc_order array so that the newly
1307 freed r10-r13 registers are the preferred registers. */
1308 fixed_regs[7] = call_used_regs[7] = 1;
1310 else
1312 /* Restore the normal register masks. */
1313 memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
1314 memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
1317 using_fixed_regs = use_fixed_regs;
1321 struct decl_chain
1323 tree fndecl;
1324 struct decl_chain * next;
1327 /* Stack of decls for which we have issued warnings. */
1328 static struct decl_chain * warned_decls = NULL;
1330 static void
1331 add_warned_decl (tree fndecl)
1333 struct decl_chain * warned = (struct decl_chain *) xmalloc (sizeof * warned);
1335 warned->fndecl = fndecl;
1336 warned->next = warned_decls;
1337 warned_decls = warned;
1340 /* Returns TRUE if FNDECL is on our list of warned about decls. */
1342 static bool
1343 already_warned (tree fndecl)
1345 struct decl_chain * warned;
1347 for (warned = warned_decls;
1348 warned != NULL;
1349 warned = warned->next)
1350 if (warned->fndecl == fndecl)
1351 return true;
1353 return false;
1356 /* Perform any actions necessary before starting to compile FNDECL.
1357 For the RX we use this to make sure that we have the correct
1358 set of register masks selected. If FNDECL is NULL then we are
1359 compiling top level things. */
1361 static void
1362 rx_set_current_function (tree fndecl)
1364 /* Remember the last target of rx_set_current_function. */
1365 static tree rx_previous_fndecl;
1366 bool prev_was_fast_interrupt;
1367 bool current_is_fast_interrupt;
1369 /* Only change the context if the function changes. This hook is called
1370 several times in the course of compiling a function, and we don't want
1371 to slow things down too much or call target_reinit when it isn't safe. */
1372 if (fndecl == rx_previous_fndecl)
1373 return;
1375 prev_was_fast_interrupt
1376 = rx_previous_fndecl
1377 ? is_fast_interrupt_func (rx_previous_fndecl) : false;
1379 current_is_fast_interrupt
1380 = fndecl ? is_fast_interrupt_func (fndecl) : false;
1382 if (prev_was_fast_interrupt != current_is_fast_interrupt)
1384 use_fixed_regs = current_is_fast_interrupt;
1385 target_reinit ();
1388 if (current_is_fast_interrupt && rx_warn_multiple_fast_interrupts)
1390 /* We do not warn about the first fast interrupt routine that
1391 we see. Instead we just push it onto the stack. */
1392 if (warned_decls == NULL)
1393 add_warned_decl (fndecl);
1395 /* Otherwise if this fast interrupt is one for which we have
1396 not already issued a warning, generate one and then push
1397 it onto the stack as well. */
1398 else if (! already_warned (fndecl))
1400 warning (0, "multiple fast interrupt routines seen: %qE and %qE",
1401 fndecl, warned_decls->fndecl);
1402 add_warned_decl (fndecl);
1406 rx_previous_fndecl = fndecl;
/* Typical stack layout should look like this after the function's prologue:
1412 -- ^
1413 | | \ |
1414 | | arguments saved | Increasing
1415 | | on the stack | addresses
1416 PARENT arg pointer -> | | /
1417 -------------------------- ---- -------------------
1418 CHILD |ret | return address
1420 | | \
1421 | | call saved
1422 | | registers
1423 | | /
1425 | | \
1426 | | local
1427 | | variables
1428 frame pointer -> | | /
1430 | | \
1431 | | outgoing | Decreasing
1432 | | arguments | addresses
1433 current stack pointer -> | | / |
1434 -------------------------- ---- ------------------ V
1435 | | */
/* Return the number of set bits in X (population count).  */

static unsigned int
bit_count (unsigned int x)
{
  unsigned int count = 0;

  /* Kernighan's method: each iteration clears the lowest set bit,
     so the loop runs once per set bit.  */
  while (x)
    {
      x &= x - 1;
      count++;
    }

  return count;
}
/* Nonzero when the current function is an (fast) interrupt handler and
   the user has asked for the accumulator register to be preserved.  */
#define MUST_SAVE_ACC_REGISTER			\
  (TARGET_SAVE_ACC_REGISTER			\
   && (is_interrupt_func (NULL_TREE)		\
       || is_fast_interrupt_func (NULL_TREE)))
1457 /* Returns either the lowest numbered and highest numbered registers that
1458 occupy the call-saved area of the stack frame, if the registers are
1459 stored as a contiguous block, or else a bitmask of the individual
1460 registers if they are stored piecemeal.
1462 Also computes the size of the frame and the size of the outgoing
1463 arguments block (in bytes). */
1465 static void
1466 rx_get_stack_layout (unsigned int * lowest,
1467 unsigned int * highest,
1468 unsigned int * register_mask,
1469 unsigned int * frame_size,
1470 unsigned int * stack_size)
1472 unsigned int reg;
1473 unsigned int low;
1474 unsigned int high;
1475 unsigned int fixed_reg = 0;
1476 unsigned int save_mask;
1477 unsigned int pushed_mask;
1478 unsigned int unneeded_pushes;
1480 if (is_naked_func (NULL_TREE))
1482 /* Naked functions do not create their own stack frame.
1483 Instead the programmer must do that for us. */
1484 * lowest = 0;
1485 * highest = 0;
1486 * register_mask = 0;
1487 * frame_size = 0;
1488 * stack_size = 0;
1489 return;
1492 for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
1494 if ((df_regs_ever_live_p (reg)
1495 /* Always save all call clobbered registers inside non-leaf
1496 interrupt handlers, even if they are not live - they may
1497 be used in (non-interrupt aware) routines called from this one. */
1498 || (call_used_regs[reg]
1499 && is_interrupt_func (NULL_TREE)
1500 && ! crtl->is_leaf))
1501 && (! call_used_regs[reg]
1502 /* Even call clobbered registered must
1503 be pushed inside interrupt handlers. */
1504 || is_interrupt_func (NULL_TREE)
1505 /* Likewise for fast interrupt handlers, except registers r10 -
1506 r13. These are normally call-saved, but may have been set
1507 to call-used by rx_conditional_register_usage. If so then
1508 they can be used in the fast interrupt handler without
1509 saving them on the stack. */
1510 || (is_fast_interrupt_func (NULL_TREE)
1511 && ! IN_RANGE (reg, 10, 13))))
1513 if (low == 0)
1514 low = reg;
1515 high = reg;
1517 save_mask |= 1 << reg;
1520 /* Remember if we see a fixed register
1521 after having found the low register. */
1522 if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
1523 fixed_reg = reg;
1526 /* If we have to save the accumulator register, make sure
1527 that at least two registers are pushed into the frame. */
1528 if (MUST_SAVE_ACC_REGISTER
1529 && bit_count (save_mask) < 2)
1531 save_mask |= (1 << 13) | (1 << 14);
1532 if (low == 0)
1533 low = 13;
1534 if (high == 0 || low == high)
1535 high = low + 1;
1538 /* Decide if it would be faster fill in the call-saved area of the stack
1539 frame using multiple PUSH instructions instead of a single PUSHM
1540 instruction.
1542 SAVE_MASK is a bitmask of the registers that must be stored in the
1543 call-save area. PUSHED_MASK is a bitmask of the registers that would
1544 be pushed into the area if we used a PUSHM instruction. UNNEEDED_PUSHES
1545 is a bitmask of those registers in pushed_mask that are not in
1546 save_mask.
1548 We use a simple heuristic that says that it is better to use
1549 multiple PUSH instructions if the number of unnecessary pushes is
1550 greater than the number of necessary pushes.
1552 We also use multiple PUSH instructions if there are any fixed registers
1553 between LOW and HIGH. The only way that this can happen is if the user
1554 has specified --fixed-<reg-name> on the command line and in such
1555 circumstances we do not want to touch the fixed registers at all.
1557 FIXME: Is it worth improving this heuristic ? */
1558 pushed_mask = (-1 << low) & ~(-1 << (high + 1));
1559 unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
1561 if ((fixed_reg && fixed_reg <= high)
1562 || (optimize_function_for_speed_p (cfun)
1563 && bit_count (save_mask) < bit_count (unneeded_pushes)))
1565 /* Use multiple pushes. */
1566 * lowest = 0;
1567 * highest = 0;
1568 * register_mask = save_mask;
1570 else
1572 /* Use one push multiple instruction. */
1573 * lowest = low;
1574 * highest = high;
1575 * register_mask = 0;
1578 * frame_size = rx_round_up
1579 (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);
1581 if (crtl->args.size > 0)
1582 * frame_size += rx_round_up
1583 (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);
1585 * stack_size = rx_round_up
1586 (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
1589 /* Generate a PUSHM instruction that matches the given operands. */
1591 void
1592 rx_emit_stack_pushm (rtx * operands)
1594 HOST_WIDE_INT last_reg;
1595 rtx first_push;
1597 gcc_assert (CONST_INT_P (operands[0]));
1598 last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;
1600 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1601 first_push = XVECEXP (operands[1], 0, 1);
1602 gcc_assert (SET_P (first_push));
1603 first_push = SET_SRC (first_push);
1604 gcc_assert (REG_P (first_push));
1606 asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
1607 reg_names [REGNO (first_push) - last_reg],
1608 reg_names [REGNO (first_push)]);
1611 /* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate. */
1613 static rtx
1614 gen_rx_store_vector (unsigned int low, unsigned int high)
1616 unsigned int i;
1617 unsigned int count = (high - low) + 2;
1618 rtx vector;
1620 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1622 XVECEXP (vector, 0, 0) =
1623 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1624 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1625 GEN_INT ((count - 1) * UNITS_PER_WORD)));
1627 for (i = 0; i < count - 1; i++)
1628 XVECEXP (vector, 0, i + 1) =
1629 gen_rtx_SET (VOIDmode,
1630 gen_rtx_MEM (SImode,
1631 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1632 GEN_INT ((i + 1) * UNITS_PER_WORD))),
1633 gen_rtx_REG (SImode, high - i));
1634 return vector;
1637 /* Mark INSN as being frame related. If it is a PARALLEL
1638 then mark each element as being frame related as well. */
1640 static void
1641 mark_frame_related (rtx insn)
1643 RTX_FRAME_RELATED_P (insn) = 1;
1644 insn = PATTERN (insn);
1646 if (GET_CODE (insn) == PARALLEL)
1648 unsigned int i;
1650 for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
1651 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
1655 static bool
1656 ok_for_max_constant (HOST_WIDE_INT val)
1658 if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
1659 /* If there is no constraint on the size of constants
1660 used as operands, then any value is legitimate. */
1661 return true;
1663 /* rx_max_constant_size specifies the maximum number
1664 of bytes that can be used to hold a signed value. */
1665 return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
1666 ( 1 << (rx_max_constant_size * 8)));
1669 /* Generate an ADD of SRC plus VAL into DEST.
1670 Handles the case where VAL is too big for max_constant_value.
1671 Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true. */
1673 static void
1674 gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
1676 rtx insn;
1678 if (val == NULL_RTX || INTVAL (val) == 0)
1680 gcc_assert (dest != src);
1682 insn = emit_move_insn (dest, src);
1684 else if (ok_for_max_constant (INTVAL (val)))
1685 insn = emit_insn (gen_addsi3 (dest, src, val));
1686 else
1688 /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
1689 will not reject it. */
1690 val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
1691 insn = emit_insn (gen_addsi3 (dest, src, val));
1693 if (is_frame_related)
1694 /* We have to provide our own frame related note here
1695 as the dwarf2out code cannot be expected to grok
1696 our unspec. */
1697 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
1698 gen_rtx_SET (SImode, dest,
1699 gen_rtx_PLUS (SImode, src, val)));
1700 return;
1703 if (is_frame_related)
1704 RTX_FRAME_RELATED_P (insn) = 1;
1705 return;
1708 void
1709 rx_expand_prologue (void)
1711 unsigned int stack_size;
1712 unsigned int frame_size;
1713 unsigned int mask;
1714 unsigned int low;
1715 unsigned int high;
1716 unsigned int reg;
1717 rtx insn;
1719 /* Naked functions use their own, programmer provided prologues. */
1720 if (is_naked_func (NULL_TREE))
1721 return;
1723 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1725 if (flag_stack_usage_info)
1726 current_function_static_stack_size = frame_size + stack_size;
1728 /* If we use any of the callee-saved registers, save them now. */
1729 if (mask)
1731 /* Push registers in reverse order. */
1732 for (reg = CC_REGNUM; reg --;)
1733 if (mask & (1 << reg))
1735 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
1736 mark_frame_related (insn);
1739 else if (low)
1741 if (high == low)
1742 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1743 else
1744 insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
1745 * UNITS_PER_WORD),
1746 gen_rx_store_vector (low, high)));
1747 mark_frame_related (insn);
1750 if (MUST_SAVE_ACC_REGISTER)
1752 unsigned int acc_high, acc_low;
1754 /* Interrupt handlers have to preserve the accumulator
1755 register if so requested by the user. Use the first
1756 two pushed registers as intermediaries. */
1757 if (mask)
1759 acc_low = acc_high = 0;
1761 for (reg = 1; reg < CC_REGNUM; reg ++)
1762 if (mask & (1 << reg))
1764 if (acc_low == 0)
1765 acc_low = reg;
1766 else
1768 acc_high = reg;
1769 break;
1773 /* We have assumed that there are at least two registers pushed... */
1774 gcc_assert (acc_high != 0);
1776 /* Note - the bottom 16 bits of the accumulator are inaccessible.
1777 We just assume that they are zero. */
1778 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1779 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1780 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
1781 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
1783 else
1785 acc_low = low;
1786 acc_high = low + 1;
1788 /* We have assumed that there are at least two registers pushed... */
1789 gcc_assert (acc_high <= high);
1791 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1792 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1793 emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
1794 gen_rx_store_vector (acc_low, acc_high)));
1798 /* If needed, set up the frame pointer. */
1799 if (frame_pointer_needed)
1800 gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
1801 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1803 /* Allocate space for the outgoing args.
1804 If the stack frame has not already been set up then handle this as well. */
1805 if (stack_size)
1807 if (frame_size)
1809 if (frame_pointer_needed)
1810 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
1811 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
1812 else
1813 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1814 GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
1815 true);
1817 else
1818 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1819 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
1821 else if (frame_size)
1823 if (! frame_pointer_needed)
1824 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1825 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1826 else
1827 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
1828 true);
1832 static void
1833 add_vector_labels (FILE *file, const char *aname)
1835 tree vec_attr;
1836 tree val_attr;
1837 const char *vname = "vect";
1838 const char *s;
1839 int vnum;
1841 /* This node is for the vector/interrupt tag itself */
1842 vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
1843 if (!vec_attr)
1844 return;
1846 /* Now point it at the first argument */
1847 vec_attr = TREE_VALUE (vec_attr);
1849 /* Iterate through the arguments. */
1850 while (vec_attr)
1852 val_attr = TREE_VALUE (vec_attr);
1853 switch (TREE_CODE (val_attr))
1855 case STRING_CST:
1856 s = TREE_STRING_POINTER (val_attr);
1857 goto string_id_common;
1859 case IDENTIFIER_NODE:
1860 s = IDENTIFIER_POINTER (val_attr);
1862 string_id_common:
1863 if (strcmp (s, "$default") == 0)
1865 fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
1866 fprintf (file, "$tableentry$default$%s:\n", vname);
1868 else
1869 vname = s;
1870 break;
1872 case INTEGER_CST:
1873 vnum = TREE_INT_CST_LOW (val_attr);
1875 fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
1876 fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
1877 break;
1879 default:
1883 vec_attr = TREE_CHAIN (vec_attr);
1888 static void
1889 rx_output_function_prologue (FILE * file,
1890 HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
1892 add_vector_labels (file, "interrupt");
1893 add_vector_labels (file, "vector");
1895 if (is_fast_interrupt_func (NULL_TREE))
1896 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1898 if (is_interrupt_func (NULL_TREE))
1899 asm_fprintf (file, "\t; Note: Interrupt Handler\n");
1901 if (is_naked_func (NULL_TREE))
1902 asm_fprintf (file, "\t; Note: Naked Function\n");
1904 if (cfun->static_chain_decl != NULL)
1905 asm_fprintf (file, "\t; Note: Nested function declared "
1906 "inside another function.\n");
1908 if (crtl->calls_eh_return)
1909 asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1912 /* Generate a POPM or RTSD instruction that matches the given operands. */
1914 void
1915 rx_emit_stack_popm (rtx * operands, bool is_popm)
1917 HOST_WIDE_INT stack_adjust;
1918 HOST_WIDE_INT last_reg;
1919 rtx first_push;
1921 gcc_assert (CONST_INT_P (operands[0]));
1922 stack_adjust = INTVAL (operands[0]);
1924 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1925 last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1927 first_push = XVECEXP (operands[1], 0, 1);
1928 gcc_assert (SET_P (first_push));
1929 first_push = SET_DEST (first_push);
1930 gcc_assert (REG_P (first_push));
1932 if (is_popm)
1933 asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1934 reg_names [REGNO (first_push)],
1935 reg_names [REGNO (first_push) + last_reg]);
1936 else
1937 asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1938 (int) stack_adjust,
1939 reg_names [REGNO (first_push)],
1940 reg_names [REGNO (first_push) + last_reg]);
1943 /* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate. */
1945 static rtx
1946 gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1948 unsigned int i;
1949 unsigned int bias = 3;
1950 unsigned int count = (high - low) + bias;
1951 rtx vector;
1953 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1955 XVECEXP (vector, 0, 0) =
1956 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1957 plus_constant (Pmode, stack_pointer_rtx, adjust));
1959 for (i = 0; i < count - 2; i++)
1960 XVECEXP (vector, 0, i + 1) =
1961 gen_rtx_SET (VOIDmode,
1962 gen_rtx_REG (SImode, low + i),
1963 gen_rtx_MEM (SImode,
1964 i == 0 ? stack_pointer_rtx
1965 : plus_constant (Pmode, stack_pointer_rtx,
1966 i * UNITS_PER_WORD)));
1968 XVECEXP (vector, 0, count - 1) = ret_rtx;
1970 return vector;
1973 /* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */
1975 static rtx
1976 gen_rx_popm_vector (unsigned int low, unsigned int high)
1978 unsigned int i;
1979 unsigned int count = (high - low) + 2;
1980 rtx vector;
1982 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1984 XVECEXP (vector, 0, 0) =
1985 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1986 plus_constant (Pmode, stack_pointer_rtx,
1987 (count - 1) * UNITS_PER_WORD));
1989 for (i = 0; i < count - 1; i++)
1990 XVECEXP (vector, 0, i + 1) =
1991 gen_rtx_SET (VOIDmode,
1992 gen_rtx_REG (SImode, low + i),
1993 gen_rtx_MEM (SImode,
1994 i == 0 ? stack_pointer_rtx
1995 : plus_constant (Pmode, stack_pointer_rtx,
1996 i * UNITS_PER_WORD)));
1998 return vector;
2001 /* Returns true if a simple return insn can be used. */
2003 bool
2004 rx_can_use_simple_return (void)
2006 unsigned int low;
2007 unsigned int high;
2008 unsigned int frame_size;
2009 unsigned int stack_size;
2010 unsigned int register_mask;
2012 if (is_naked_func (NULL_TREE)
2013 || is_fast_interrupt_func (NULL_TREE)
2014 || is_interrupt_func (NULL_TREE))
2015 return false;
2017 rx_get_stack_layout (& low, & high, & register_mask,
2018 & frame_size, & stack_size);
2020 return (register_mask == 0
2021 && (frame_size + stack_size) == 0
2022 && low == 0);
2025 void
2026 rx_expand_epilogue (bool is_sibcall)
2028 unsigned int low;
2029 unsigned int high;
2030 unsigned int frame_size;
2031 unsigned int stack_size;
2032 unsigned int register_mask;
2033 unsigned int regs_size;
2034 unsigned int reg;
2035 unsigned HOST_WIDE_INT total_size;
2037 /* FIXME: We do not support indirect sibcalls at the moment becaause we
2038 cannot guarantee that the register holding the function address is a
2039 call-used register. If it is a call-saved register then the stack
2040 pop instructions generated in the epilogue will corrupt the address
2041 before it is used.
2043 Creating a new call-used-only register class works but then the
2044 reload pass gets stuck because it cannot always find a call-used
2045 register for spilling sibcalls.
2047 The other possible solution is for this pass to scan forward for the
2048 sibcall instruction (if it has been generated) and work out if it
2049 is an indirect sibcall using a call-saved register. If it is then
2050 the address can copied into a call-used register in this epilogue
2051 code and the sibcall instruction modified to use that register. */
2053 if (is_naked_func (NULL_TREE))
2055 gcc_assert (! is_sibcall);
2057 /* Naked functions use their own, programmer provided epilogues.
2058 But, in order to keep gcc happy we have to generate some kind of
2059 epilogue RTL. */
2060 emit_jump_insn (gen_naked_return ());
2061 return;
2064 rx_get_stack_layout (& low, & high, & register_mask,
2065 & frame_size, & stack_size);
2067 total_size = frame_size + stack_size;
2068 regs_size = ((high - low) + 1) * UNITS_PER_WORD;
2070 /* See if we are unable to use the special stack frame deconstruct and
2071 return instructions. In most cases we can use them, but the exceptions
2072 are:
2074 - Sibling calling functions deconstruct the frame but do not return to
2075 their caller. Instead they branch to their sibling and allow their
2076 return instruction to return to this function's parent.
2078 - Fast and normal interrupt handling functions have to use special
2079 return instructions.
2081 - Functions where we have pushed a fragmented set of registers into the
2082 call-save area must have the same set of registers popped. */
2083 if (is_sibcall
2084 || is_fast_interrupt_func (NULL_TREE)
2085 || is_interrupt_func (NULL_TREE)
2086 || register_mask)
2088 /* Cannot use the special instructions - deconstruct by hand. */
2089 if (total_size)
2090 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2091 GEN_INT (total_size), false);
2093 if (MUST_SAVE_ACC_REGISTER)
2095 unsigned int acc_low, acc_high;
2097 /* Reverse the saving of the accumulator register onto the stack.
2098 Note we must adjust the saved "low" accumulator value as it
2099 is really the middle 32-bits of the accumulator. */
2100 if (register_mask)
2102 acc_low = acc_high = 0;
2104 for (reg = 1; reg < CC_REGNUM; reg ++)
2105 if (register_mask & (1 << reg))
2107 if (acc_low == 0)
2108 acc_low = reg;
2109 else
2111 acc_high = reg;
2112 break;
2115 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
2116 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
2118 else
2120 acc_low = low;
2121 acc_high = low + 1;
2122 emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
2123 gen_rx_popm_vector (acc_low, acc_high)));
2126 emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
2127 gen_rtx_REG (SImode, acc_low),
2128 GEN_INT (16)));
2129 emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
2130 emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
2133 if (register_mask)
2135 for (reg = 0; reg < CC_REGNUM; reg ++)
2136 if (register_mask & (1 << reg))
2137 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)));
2139 else if (low)
2141 if (high == low)
2142 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
2143 else
2144 emit_insn (gen_stack_popm (GEN_INT (regs_size),
2145 gen_rx_popm_vector (low, high)));
2148 if (is_fast_interrupt_func (NULL_TREE))
2150 gcc_assert (! is_sibcall);
2151 emit_jump_insn (gen_fast_interrupt_return ());
2153 else if (is_interrupt_func (NULL_TREE))
2155 gcc_assert (! is_sibcall);
2156 emit_jump_insn (gen_exception_return ());
2158 else if (! is_sibcall)
2159 emit_jump_insn (gen_simple_return ());
2161 return;
2164 /* If we allocated space on the stack, free it now. */
2165 if (total_size)
2167 unsigned HOST_WIDE_INT rtsd_size;
2169 /* See if we can use the RTSD instruction. */
2170 rtsd_size = total_size + regs_size;
2171 if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
2173 if (low)
2174 emit_jump_insn (gen_pop_and_return
2175 (GEN_INT (rtsd_size),
2176 gen_rx_rtsd_vector (rtsd_size, low, high)));
2177 else
2178 emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
2180 return;
2183 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2184 GEN_INT (total_size), false);
2187 if (low)
2188 emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
2189 gen_rx_rtsd_vector (regs_size,
2190 low, high)));
2191 else
2192 emit_jump_insn (gen_simple_return ());
2196 /* Compute the offset (in words) between FROM (arg pointer
2197 or frame pointer) and TO (frame pointer or stack pointer).
2198 See ASCII art comment at the start of rx_expand_prologue
2199 for more information. */
2202 rx_initial_elimination_offset (int from, int to)
2204 unsigned int low;
2205 unsigned int high;
2206 unsigned int frame_size;
2207 unsigned int stack_size;
2208 unsigned int mask;
2210 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
2212 if (from == ARG_POINTER_REGNUM)
2214 /* Extend the computed size of the stack frame to
2215 include the registers pushed in the prologue. */
2216 if (low)
2217 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
2218 else
2219 frame_size += bit_count (mask) * UNITS_PER_WORD;
2221 /* Remember to include the return address. */
2222 frame_size += 1 * UNITS_PER_WORD;
2224 if (to == FRAME_POINTER_REGNUM)
2225 return frame_size;
2227 gcc_assert (to == STACK_POINTER_REGNUM);
2228 return frame_size + stack_size;
2231 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
2232 return stack_size;
2235 /* Decide if a variable should go into one of the small data sections. */
2237 static bool
2238 rx_in_small_data (const_tree decl)
2240 int size;
2241 const char * section;
2243 if (rx_small_data_limit == 0)
2244 return false;
2246 if (TREE_CODE (decl) != VAR_DECL)
2247 return false;
2249 /* We do not put read-only variables into a small data area because
2250 they would be placed with the other read-only sections, far away
2251 from the read-write data sections, and we only have one small
2252 data area pointer.
2253 Similarly commons are placed in the .bss section which might be
2254 far away (and out of alignment with respect to) the .data section. */
2255 if (TREE_READONLY (decl) || DECL_COMMON (decl))
2256 return false;
2258 section = DECL_SECTION_NAME (decl);
2259 if (section)
2260 return (strcmp (section, "D_2") == 0) || (strcmp (section, "B_2") == 0);
2262 size = int_size_in_bytes (TREE_TYPE (decl));
2264 return (size > 0) && (size <= rx_small_data_limit);
2267 /* Return a section for X.
2268 The only special thing we do here is to honor small data. */
2270 static section *
2271 rx_select_rtx_section (machine_mode mode,
2272 rtx x,
2273 unsigned HOST_WIDE_INT align)
2275 if (rx_small_data_limit > 0
2276 && GET_MODE_SIZE (mode) <= rx_small_data_limit
2277 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
2278 return sdata_section;
2280 return default_elf_select_rtx_section (mode, x, align);
2283 static section *
2284 rx_select_section (tree decl,
2285 int reloc,
2286 unsigned HOST_WIDE_INT align)
2288 if (rx_small_data_limit > 0)
2290 switch (categorize_decl_for_section (decl, reloc))
2292 case SECCAT_SDATA: return sdata_section;
2293 case SECCAT_SBSS: return sbss_section;
2294 case SECCAT_SRODATA:
2295 /* Fall through. We do not put small, read only
2296 data into the C_2 section because we are not
2297 using the C_2 section. We do not use the C_2
2298 section because it is located with the other
2299 read-only data sections, far away from the read-write
2300 data sections and we only have one small data
2301 pointer (r13). */
2302 default:
2303 break;
2307 /* If we are supporting the Renesas assembler
2308 we cannot use mergeable sections. */
2309 if (TARGET_AS100_SYNTAX)
2310 switch (categorize_decl_for_section (decl, reloc))
2312 case SECCAT_RODATA_MERGE_CONST:
2313 case SECCAT_RODATA_MERGE_STR_INIT:
2314 case SECCAT_RODATA_MERGE_STR:
2315 return readonly_data_section;
2317 default:
2318 break;
2321 return default_elf_select_section (decl, reloc, align);
2324 enum rx_builtin
2326 RX_BUILTIN_BRK,
2327 RX_BUILTIN_CLRPSW,
2328 RX_BUILTIN_INT,
2329 RX_BUILTIN_MACHI,
2330 RX_BUILTIN_MACLO,
2331 RX_BUILTIN_MULHI,
2332 RX_BUILTIN_MULLO,
2333 RX_BUILTIN_MVFACHI,
2334 RX_BUILTIN_MVFACMI,
2335 RX_BUILTIN_MVFC,
2336 RX_BUILTIN_MVTACHI,
2337 RX_BUILTIN_MVTACLO,
2338 RX_BUILTIN_MVTC,
2339 RX_BUILTIN_MVTIPL,
2340 RX_BUILTIN_RACW,
2341 RX_BUILTIN_REVW,
2342 RX_BUILTIN_RMPA,
2343 RX_BUILTIN_ROUND,
2344 RX_BUILTIN_SETPSW,
2345 RX_BUILTIN_WAIT,
2346 RX_BUILTIN_max
2349 static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
2351 static void
2352 rx_init_builtins (void)
2354 #define ADD_RX_BUILTIN0(UC_NAME, LC_NAME, RET_TYPE) \
2355 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2356 add_builtin_function ("__builtin_rx_" LC_NAME, \
2357 build_function_type_list (RET_TYPE##_type_node, \
2358 NULL_TREE), \
2359 RX_BUILTIN_##UC_NAME, \
2360 BUILT_IN_MD, NULL, NULL_TREE)
2362 #define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
2363 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2364 add_builtin_function ("__builtin_rx_" LC_NAME, \
2365 build_function_type_list (RET_TYPE##_type_node, \
2366 ARG_TYPE##_type_node, \
2367 NULL_TREE), \
2368 RX_BUILTIN_##UC_NAME, \
2369 BUILT_IN_MD, NULL, NULL_TREE)
2371 #define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
2372 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2373 add_builtin_function ("__builtin_rx_" LC_NAME, \
2374 build_function_type_list (RET_TYPE##_type_node, \
2375 ARG_TYPE1##_type_node,\
2376 ARG_TYPE2##_type_node,\
2377 NULL_TREE), \
2378 RX_BUILTIN_##UC_NAME, \
2379 BUILT_IN_MD, NULL, NULL_TREE)
2381 #define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
2382 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2383 add_builtin_function ("__builtin_rx_" LC_NAME, \
2384 build_function_type_list (RET_TYPE##_type_node, \
2385 ARG_TYPE1##_type_node,\
2386 ARG_TYPE2##_type_node,\
2387 ARG_TYPE3##_type_node,\
2388 NULL_TREE), \
2389 RX_BUILTIN_##UC_NAME, \
2390 BUILT_IN_MD, NULL, NULL_TREE)
2392 ADD_RX_BUILTIN0 (BRK, "brk", void);
2393 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
2394 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
2395 ADD_RX_BUILTIN1 (INT, "int", void, integer);
2396 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
2397 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
2398 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
2399 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
2400 ADD_RX_BUILTIN0 (MVFACHI, "mvfachi", intSI);
2401 ADD_RX_BUILTIN0 (MVFACMI, "mvfacmi", intSI);
2402 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
2403 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
2404 ADD_RX_BUILTIN0 (RMPA, "rmpa", void);
2405 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
2406 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
2407 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
2408 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
2409 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
2410 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
2411 ADD_RX_BUILTIN0 (WAIT, "wait", void);
2414 /* Return the RX builtin for CODE. */
2416 static tree
2417 rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2419 if (code >= RX_BUILTIN_max)
2420 return error_mark_node;
2422 return rx_builtins[code];
2425 static rtx
2426 rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
2428 if (reg && ! REG_P (arg))
2429 arg = force_reg (SImode, arg);
2431 emit_insn (gen_func (arg));
2433 return NULL_RTX;
2436 static rtx
2437 rx_expand_builtin_mvtc (tree exp)
2439 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2440 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2442 if (! CONST_INT_P (arg1))
2443 return NULL_RTX;
2445 if (! REG_P (arg2))
2446 arg2 = force_reg (SImode, arg2);
2448 emit_insn (gen_mvtc (arg1, arg2));
2450 return NULL_RTX;
2453 static rtx
2454 rx_expand_builtin_mvfc (tree t_arg, rtx target)
2456 rtx arg = expand_normal (t_arg);
2458 if (! CONST_INT_P (arg))
2459 return NULL_RTX;
2461 if (target == NULL_RTX)
2462 return NULL_RTX;
2464 if (! REG_P (target))
2465 target = force_reg (SImode, target);
2467 emit_insn (gen_mvfc (target, arg));
2469 return target;
2472 static rtx
2473 rx_expand_builtin_mvtipl (rtx arg)
2475 /* The RX610 does not support the MVTIPL instruction. */
2476 if (rx_cpu_type == RX610)
2477 return NULL_RTX;
2479 if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
2480 return NULL_RTX;
2482 emit_insn (gen_mvtipl (arg));
2484 return NULL_RTX;
2487 static rtx
2488 rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2490 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2491 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2493 if (! REG_P (arg1))
2494 arg1 = force_reg (SImode, arg1);
2496 if (! REG_P (arg2))
2497 arg2 = force_reg (SImode, arg2);
2499 emit_insn (gen_func (arg1, arg2));
2501 return NULL_RTX;
2504 static rtx
2505 rx_expand_int_builtin_1_arg (rtx arg,
2506 rtx target,
2507 rtx (* gen_func)(rtx, rtx),
2508 bool mem_ok)
2510 if (! REG_P (arg))
2511 if (!mem_ok || ! MEM_P (arg))
2512 arg = force_reg (SImode, arg);
2514 if (target == NULL_RTX || ! REG_P (target))
2515 target = gen_reg_rtx (SImode);
2517 emit_insn (gen_func (target, arg));
2519 return target;
2522 static rtx
2523 rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2525 if (target == NULL_RTX || ! REG_P (target))
2526 target = gen_reg_rtx (SImode);
2528 emit_insn (gen_func (target));
2530 return target;
2533 static rtx
2534 rx_expand_builtin_round (rtx arg, rtx target)
2536 if ((! REG_P (arg) && ! MEM_P (arg))
2537 || GET_MODE (arg) != SFmode)
2538 arg = force_reg (SFmode, arg);
2540 if (target == NULL_RTX || ! REG_P (target))
2541 target = gen_reg_rtx (SImode);
2543 emit_insn (gen_lrintsf2 (target, arg));
2545 return target;
/* Validate OP, the flag-selector operand of __builtin_rx_clrpsw or
   __builtin_rx_setpsw.  WHICH is the builtin's name, used in the
   diagnostic.  Each accepted flag may be named by its bit number or by
   an upper/lower-case mnemonic character.  Returns 1 if OP is valid,
   otherwise emits an error and returns 0.  */

static int
valid_psw_flag (rtx op, const char *which)
{
  /* Emit the mvtc follow-up hint only once per compilation.  */
  static int mvtc_inform_done = 0;

  if (GET_CODE (op) == CONST_INT)
    switch (INTVAL (op))
      {
      case 0: case 'c': case 'C':	/* Carry.  */
      case 1: case 'z': case 'Z':	/* Zero.  */
      case 2: case 's': case 'S':	/* Sign.  */
      case 3: case 'o': case 'O':	/* Overflow.  */
      case 8: case 'i': case 'I':	/* Presumably interrupt-enable
					   -- confirm vs RX PSW layout.  */
      case 9: case 'u': case 'U':	/* Presumably user-stack select
					   -- confirm vs RX PSW layout.  */
	return 1;
      }

  error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
  if (!mvtc_inform_done)
    error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
  mvtc_inform_done = 1;

  return 0;
}
/* Implement TARGET_EXPAND_BUILTIN.  Expand a call EXP to one of the RX
   machine-specific builtins into RTL.  TARGET is a suggested place for
   the result (may be NULL_RTX).  Returns the rtx holding the result,
   or NULL_RTX for builtins that produce no value.  */

static rtx
rx_expand_builtin (tree exp,
		   rtx target,
		   rtx subtarget ATTRIBUTE_UNUSED,
		   machine_mode mode ATTRIBUTE_UNUSED,
		   int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  /* Most of these builtins take at most one argument, so expand it up
     front; multi-argument cases (mac/mvtc) re-expand from EXP.  */
  tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
  rtx op = arg ? expand_normal (arg) : NULL_RTX;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
    case RX_BUILTIN_CLRPSW:
      if (!valid_psw_flag (op, "clrpsw"))
	return NULL_RTX;
      return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
    case RX_BUILTIN_SETPSW:
      if (!valid_psw_flag (op, "setpsw"))
	return NULL_RTX;
      return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
    case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
	(op, gen_int, false);
    case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
    case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
    case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
    case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
    case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
	(target, gen_mvfachi);
    case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
	(target, gen_mvfacmi);
    case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
	(op, gen_mvtachi, true);
    case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
	(op, gen_mvtaclo, true);
    case RX_BUILTIN_RMPA: emit_insn (gen_rmpa ()); return NULL_RTX;
    case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
    case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
    case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
    case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
	(op, gen_racw, false);
    case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
    case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
	(op, target, gen_revw, false);
    case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;

    default:
      internal_error ("bad builtin code");
      break;
    }

  return NULL_RTX;
}
2629 /* Place an element into a constructor or destructor section.
2630 Like default_ctor_section_asm_out_constructor in varasm.c
2631 except that it uses .init_array (or .fini_array) and it
2632 handles constructor priorities. */
2634 static void
2635 rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2637 section * s;
2639 if (priority != DEFAULT_INIT_PRIORITY)
2641 char buf[18];
2643 sprintf (buf, "%s.%.5u",
2644 is_ctor ? ".init_array" : ".fini_array",
2645 priority);
2646 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2648 else if (is_ctor)
2649 s = ctors_section;
2650 else
2651 s = dtors_section;
2653 switch_to_section (s);
2654 assemble_align (POINTER_SIZE);
2655 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2658 static void
2659 rx_elf_asm_constructor (rtx symbol, int priority)
2661 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2664 static void
2665 rx_elf_asm_destructor (rtx symbol, int priority)
2667 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2670 /* Check "fast_interrupt", "interrupt" and "naked" attributes. */
2672 static tree
2673 rx_handle_func_attribute (tree * node,
2674 tree name,
2675 tree args ATTRIBUTE_UNUSED,
2676 int flags ATTRIBUTE_UNUSED,
2677 bool * no_add_attrs)
2679 gcc_assert (DECL_P (* node));
2681 if (TREE_CODE (* node) != FUNCTION_DECL)
2683 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2684 name);
2685 * no_add_attrs = true;
2688 /* FIXME: We ought to check for conflicting attributes. */
2690 /* FIXME: We ought to check that the interrupt and exception
2691 handler attributes have been applied to void functions. */
2692 return NULL_TREE;
2695 /* Check "vector" attribute. */
2697 static tree
2698 rx_handle_vector_attribute (tree * node,
2699 tree name,
2700 tree args,
2701 int flags ATTRIBUTE_UNUSED,
2702 bool * no_add_attrs)
2704 gcc_assert (DECL_P (* node));
2705 gcc_assert (args != NULL_TREE);
2707 if (TREE_CODE (* node) != FUNCTION_DECL)
2709 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2710 name);
2711 * no_add_attrs = true;
2714 return NULL_TREE;
/* Table of RX specific attributes, terminated by the all-NULL entry.
   Referenced below via TARGET_ATTRIBUTE_TABLE.  */
const struct attribute_spec rx_attribute_table[] =
{
  /* Name, min_len, max_len, decl_req, type_req, fn_type_req, decl_handler,
     type_handler, affects_type_identity.  */
  { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute, NULL,
    false },
  /* "interrupt" accepts any number of arguments (max_len -1).  */
  { "interrupt", 0, -1, true, false, false, rx_handle_func_attribute, NULL,
    false },
  { "naked", 0, 0, true, false, false, rx_handle_func_attribute, NULL,
    false },
  /* "vector" requires at least one argument.  */
  { "vector", 1, -1, true, false, false, rx_handle_vector_attribute,
    NULL, false },
  { NULL, 0, 0, false, false, false, NULL, NULL, false }
};
2733 /* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE. */
2735 static void
2736 rx_override_options_after_change (void)
2738 static bool first_time = TRUE;
2740 if (first_time)
2742 /* If this is the first time through and the user has not disabled
2743 the use of RX FPU hardware then enable -ffinite-math-only,
2744 since the FPU instructions do not support NaNs and infinities. */
2745 if (TARGET_USE_FPU)
2746 flag_finite_math_only = 1;
2748 first_time = FALSE;
2750 else
2752 /* Alert the user if they are changing the optimization options
2753 to use IEEE compliant floating point arithmetic with RX FPU insns. */
2754 if (TARGET_USE_FPU
2755 && !flag_finite_math_only)
2756 warning (0, "RX FPU instructions do not support NaNs and infinities");
/* Implement TARGET_OPTION_OVERRIDE.  Process deferred command-line
   options (-mint-register=N), set bitfield and alignment defaults.  */

static void
rx_option_override (void)
{
  unsigned int i;
  cl_deferred_option *opt;
  vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;

  if (v)
    FOR_EACH_VEC_ELT (*v, i, opt)
      {
	switch (opt->opt_index)
	  {
	  case OPT_mint_register_:
	    /* -mint-register=N reserves the top N of r10..r13 for fast
	       interrupt handlers.  The cases deliberately cascade so a
	       larger N fixes every register a smaller N would.  */
	    switch (opt->value)
	      {
	      case 4:
		fixed_regs[10] = call_used_regs [10] = 1;
		/* Fall through.  */
	      case 3:
		fixed_regs[11] = call_used_regs [11] = 1;
		/* Fall through.  */
	      case 2:
		fixed_regs[12] = call_used_regs [12] = 1;
		/* Fall through.  */
	      case 1:
		fixed_regs[13] = call_used_regs [13] = 1;
		/* Fall through.  */
	      case 0:
		rx_num_interrupt_regs = opt->value;
		break;
	      default:
		rx_num_interrupt_regs = 0;
		/* Error message already given because rx_handle_option
		   returned false.  */
		break;
	      }
	    break;

	  default:
	    gcc_unreachable ();
	  }
      }

  /* This target defaults to strict volatile bitfields.  */
  if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
    flag_strict_volatile_bitfields = 1;

  rx_override_options_after_change ();

  /* These values are bytes, not log.  */
  if (align_jumps == 0 && ! optimize_size)
    align_jumps = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
  if (align_loops == 0 && ! optimize_size)
    align_loops = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
  if (align_labels == 0 && ! optimize_size)
    align_labels = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
}
2819 static bool
2820 rx_allocate_stack_slots_for_args (void)
2822 /* Naked functions should not allocate stack slots for arguments. */
2823 return ! is_naked_func (NULL_TREE);
2826 static bool
2827 rx_func_attr_inlinable (const_tree decl)
2829 return ! is_fast_interrupt_func (decl)
2830 && ! is_interrupt_func (decl)
2831 && ! is_naked_func (decl);
2834 static bool
2835 rx_warn_func_return (tree decl)
2837 /* Naked functions are implemented entirely in assembly, including the
2838 return sequence, so suppress warnings about this. */
2839 return !is_naked_func (decl);
2842 /* Return nonzero if it is ok to make a tail-call to DECL,
2843 a function_decl or NULL if this is an indirect call, using EXP */
2845 static bool
2846 rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2848 /* Do not allow indirect tailcalls. The
2849 sibcall patterns do not support them. */
2850 if (decl == NULL)
2851 return false;
2853 /* Never tailcall from inside interrupt handlers or naked functions. */
2854 if (is_fast_interrupt_func (NULL_TREE)
2855 || is_interrupt_func (NULL_TREE)
2856 || is_naked_func (NULL_TREE))
2857 return false;
2859 return true;
2862 static void
2863 rx_file_start (void)
2865 if (! TARGET_AS100_SYNTAX)
2866 default_file_start ();
2869 static bool
2870 rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2872 /* The packed attribute overrides the MS behaviour. */
2873 return ! TYPE_PACKED (record_type);
/* Returns true if X a legitimate constant for an immediate
   operand on the RX.  X is already known to satisfy CONSTANT_P.  */

bool
rx_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST:
      /* Look through the CONST wrapper.  */
      x = XEXP (x, 0);

      if (GET_CODE (x) == PLUS)
	{
	  if (! CONST_INT_P (XEXP (x, 1)))
	    return false;

	  /* GCC would not pass us CONST_INT + CONST_INT so we
	     know that we have {SYMBOL|LABEL} + CONST_INT.  */
	  x = XEXP (x, 0);
	  gcc_assert (! CONST_INT_P (x));
	}

      switch (GET_CODE (x))
	{
	case LABEL_REF:
	case SYMBOL_REF:
	  return true;

	case UNSPEC:
	  /* Only PID-address and const-marker unspecs are immediates.  */
	  return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;

	default:
	  /* FIXME: Can this ever happen ?  */
	  gcc_unreachable ();
	}
      break;

    case LABEL_REF:
    case SYMBOL_REF:
      return true;
    case CONST_DOUBLE:
      /* Accepted only when the constant-size limit is unset (0) or
	 allows a 4-byte immediate.  */
      return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
    case CONST_VECTOR:
      return false;
    default:
      gcc_assert (CONST_INT_P (x));
      break;
    }

  /* CONST_INT: check against the -mmax-constant-size limit.  */
  return ok_for_max_constant (INTVAL (x));
}
/* Implement TARGET_ADDRESS_COST.  Return a cost (in COSTS_N_INSNS
   units) for ADDR, steering register allocation away from expensive
   addressing forms.  */

static int
rx_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
		 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
{
  rtx a, b;

  /* Plain [REG] and anything else that is not PLUS gets base cost.  */
  if (GET_CODE (addr) != PLUS)
    return COSTS_N_INSNS (1);

  a = XEXP (addr, 0);
  b = XEXP (addr, 1);

  if (REG_P (a) && REG_P (b))
    /* Try to discourage REG+REG addressing as it keeps two registers live.  */
    return COSTS_N_INSNS (4);

  if (speed)
    /* [REG+OFF] is just as fast as [REG].  */
    return COSTS_N_INSNS (1);

  /* NOTE(review): the bounds below are asymmetric (>128, <-127); a
     signed 8-bit displacement would be -128..127 -- confirm against the
     RX displacement encoding before changing.  */
  if (CONST_INT_P (b)
      && ((INTVAL (b) > 128) || INTVAL (b) < -127))
    /* Try to discourage REG + <large OFF> when optimizing for size.  */
    return COSTS_N_INSNS (2);

  return COSTS_N_INSNS (1);
}
2956 static bool
2957 rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2959 /* We can always eliminate to the frame pointer.
2960 We can eliminate to the stack pointer unless a frame
2961 pointer is needed. */
2963 return to == FRAME_POINTER_REGNUM
2964 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
/* Implement TARGET_ASM_TRAMPOLINE_TEMPLATE.  */

static void
rx_trampoline_template (FILE * file)
{
  /* Output assembler code for a block containing the constant
     part of a trampoline, leaving space for the variable parts.

     On the RX, (where r8 is the static chain regnum) the trampoline
     looks like:

	mov 	#<static chain value>, r8
	mov	#<function's address>, r9
	jmp	r9

     In big-endian-data-mode however instructions are read into the CPU
     4 bytes at a time.  These bytes are then swapped around before being
     passed to the decoder.  So...we must partition our trampoline into
     4 byte packets and swap these packets around so that the instruction
     reader will reverse the process.  But, in order to avoid splitting
     the 32-bit constants across these packet boundaries, (making inserting
     them into the constructed trampoline very difficult) we have to pad the
     instruction sequence with NOP insns.  ie:

	nop
	nop
	mov.l	#<...>, r8
	nop
	nop
	mov.l	#<...>, r9
	jmp	r9
	nop
	nop  */

  if (! TARGET_BIG_ENDIAN_DATA)
    {
      /* 0deadbeefH is a placeholder immediate that rx_trampoline_init
	 overwrites with the real chain/function values.  */
      asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
      asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
      asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
    }
  else
    {
      /* Emit raw bytes so the packet swapping described above can be
	 done by hand.  The register numbers are spliced into the
	 encodings as ASCII digits.  */
      char r8 = '0' + STATIC_CHAIN_REGNUM;
      char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;

      if (TARGET_AS100_SYNTAX)
	{
	  asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
	  asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
	  asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
	  asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
	  asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
	}
      else
	{
	  asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
	  asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
	  asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
	  asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
	  asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
	}
    }
}
3030 static void
3031 rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
3033 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
3035 emit_block_move (tramp, assemble_trampoline_template (),
3036 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3038 if (TARGET_BIG_ENDIAN_DATA)
3040 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
3041 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
3043 else
3045 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
3046 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
3050 static int
3051 rx_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
3052 reg_class_t regclass ATTRIBUTE_UNUSED,
3053 bool in)
3055 return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
3058 /* Convert a CC_MODE to the set of flags that it represents. */
3060 static unsigned int
3061 flags_from_mode (machine_mode mode)
3063 switch (mode)
3065 case CC_ZSmode:
3066 return CC_FLAG_S | CC_FLAG_Z;
3067 case CC_ZSOmode:
3068 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
3069 case CC_ZSCmode:
3070 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
3071 case CCmode:
3072 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
3073 case CC_Fmode:
3074 return CC_FLAG_FP;
3075 default:
3076 gcc_unreachable ();
3080 /* Convert a set of flags to a CC_MODE that can implement it. */
3082 static machine_mode
3083 mode_from_flags (unsigned int f)
3085 if (f & CC_FLAG_FP)
3086 return CC_Fmode;
3087 if (f & CC_FLAG_O)
3089 if (f & CC_FLAG_C)
3090 return CCmode;
3091 else
3092 return CC_ZSOmode;
3094 else if (f & CC_FLAG_C)
3095 return CC_ZSCmode;
3096 else
3097 return CC_ZSmode;
3100 /* Convert an RTX_CODE to the set of flags needed to implement it.
3101 This assumes an integer comparison. */
3103 static unsigned int
3104 flags_from_code (enum rtx_code code)
3106 switch (code)
3108 case LT:
3109 case GE:
3110 return CC_FLAG_S;
3111 case GT:
3112 case LE:
3113 return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
3114 case GEU:
3115 case LTU:
3116 return CC_FLAG_C;
3117 case GTU:
3118 case LEU:
3119 return CC_FLAG_C | CC_FLAG_Z;
3120 case EQ:
3121 case NE:
3122 return CC_FLAG_Z;
3123 default:
3124 gcc_unreachable ();
3128 /* Return a CC_MODE of which both M1 and M2 are subsets. */
3130 static machine_mode
3131 rx_cc_modes_compatible (machine_mode m1, machine_mode m2)
3133 unsigned f;
3135 /* Early out for identical modes. */
3136 if (m1 == m2)
3137 return m1;
3139 /* There's no valid combination for FP vs non-FP. */
3140 f = flags_from_mode (m1) | flags_from_mode (m2);
3141 if (f & CC_FLAG_FP)
3142 return VOIDmode;
3144 /* Otherwise, see what mode can implement all the flags. */
3145 return mode_from_flags (f);
3148 /* Return the minimal CC mode needed to implement (CMP_CODE X Y). */
3150 machine_mode
3151 rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
3153 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3154 return CC_Fmode;
3156 if (y != const0_rtx)
3157 return CCmode;
3159 return mode_from_flags (flags_from_code (cmp_code));
3162 /* Split the conditional branch. Emit (COMPARE C1 C2) into CC_REG with
3163 CC_MODE, and use that in branches based on that compare. */
3165 void
3166 rx_split_cbranch (machine_mode cc_mode, enum rtx_code cmp1,
3167 rtx c1, rtx c2, rtx label)
3169 rtx flags, x;
3171 flags = gen_rtx_REG (cc_mode, CC_REG);
3172 x = gen_rtx_COMPARE (cc_mode, c1, c2);
3173 x = gen_rtx_SET (VOIDmode, flags, x);
3174 emit_insn (x);
3176 x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
3177 x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
3178 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
3179 emit_jump_insn (x);
3182 /* A helper function for matching parallels that set the flags. */
3184 bool
3185 rx_match_ccmode (rtx insn, machine_mode cc_mode)
3187 rtx op1, flags;
3188 machine_mode flags_mode;
3190 gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
3192 op1 = XVECEXP (PATTERN (insn), 0, 1);
3193 gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
3195 flags = SET_DEST (op1);
3196 flags_mode = GET_MODE (flags);
3198 if (GET_MODE (SET_SRC (op1)) != flags_mode)
3199 return false;
3200 if (GET_MODE_CLASS (flags_mode) != MODE_CC)
3201 return false;
3203 /* Ensure that the mode of FLAGS is compatible with CC_MODE. */
3204 if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
3205 return false;
3207 return true;
3211 rx_align_for_label (rtx lab, int uses_threshold)
3213 /* This is a simple heuristic to guess when an alignment would not be useful
3214 because the delay due to the inserted NOPs would be greater than the delay
3215 due to the misaligned branch. If uses_threshold is zero then the alignment
3216 is always useful. */
3217 if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
3218 return 0;
3220 if (optimize_size)
3221 return 0;
3222 /* These values are log, not bytes. */
3223 if (rx_cpu_type == RX100 || rx_cpu_type == RX200)
3224 return 2; /* 4 bytes */
3225 return 3; /* 8 bytes */
3228 static int
3229 rx_max_skip_for_label (rtx_insn *lab)
3231 int opsize;
3232 rtx_insn *op;
3234 if (optimize_size)
3235 return 0;
3237 if (lab == NULL)
3238 return 0;
3240 op = lab;
3243 op = next_nonnote_nondebug_insn (op);
3245 while (op && (LABEL_P (op)
3246 || (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
3247 if (!op)
3248 return 0;
3250 opsize = get_attr_length (op);
3251 if (opsize >= 0 && opsize < 8)
3252 return opsize - 1;
3253 return 0;
/* Compute the real length of the extending load-and-op instructions.  */

int
rx_adjust_insn_length (rtx_insn *insn, int current_length)
{
  rtx extend, mem, offset;
  bool zero;	/* True for zero-extension patterns.  */
  int factor;	/* Access size in bytes: 2 = HImode, 1 = QImode.  */

  if (!INSN_P (insn))
    return current_length;

  switch (INSN_CODE (insn))
    {
    default:
      /* Not one of the extending load-and-op patterns; keep the
	 length computed from the insn attributes.  */
      return current_length;

    case CODE_FOR_plussi3_zero_extendhi:
    case CODE_FOR_andsi3_zero_extendhi:
    case CODE_FOR_iorsi3_zero_extendhi:
    case CODE_FOR_xorsi3_zero_extendhi:
    case CODE_FOR_divsi3_zero_extendhi:
    case CODE_FOR_udivsi3_zero_extendhi:
    case CODE_FOR_minussi3_zero_extendhi:
    case CODE_FOR_smaxsi3_zero_extendhi:
    case CODE_FOR_sminsi3_zero_extendhi:
    case CODE_FOR_multsi3_zero_extendhi:
    case CODE_FOR_comparesi3_zero_extendhi:
      zero = true;
      factor = 2;
      break;

    case CODE_FOR_plussi3_sign_extendhi:
    case CODE_FOR_andsi3_sign_extendhi:
    case CODE_FOR_iorsi3_sign_extendhi:
    case CODE_FOR_xorsi3_sign_extendhi:
    case CODE_FOR_divsi3_sign_extendhi:
    case CODE_FOR_udivsi3_sign_extendhi:
    case CODE_FOR_minussi3_sign_extendhi:
    case CODE_FOR_smaxsi3_sign_extendhi:
    case CODE_FOR_sminsi3_sign_extendhi:
    case CODE_FOR_multsi3_sign_extendhi:
    case CODE_FOR_comparesi3_sign_extendhi:
      zero = false;
      factor = 2;
      break;

    case CODE_FOR_plussi3_zero_extendqi:
    case CODE_FOR_andsi3_zero_extendqi:
    case CODE_FOR_iorsi3_zero_extendqi:
    case CODE_FOR_xorsi3_zero_extendqi:
    case CODE_FOR_divsi3_zero_extendqi:
    case CODE_FOR_udivsi3_zero_extendqi:
    case CODE_FOR_minussi3_zero_extendqi:
    case CODE_FOR_smaxsi3_zero_extendqi:
    case CODE_FOR_sminsi3_zero_extendqi:
    case CODE_FOR_multsi3_zero_extendqi:
    case CODE_FOR_comparesi3_zero_extendqi:
      zero = true;
      factor = 1;
      break;

    case CODE_FOR_plussi3_sign_extendqi:
    case CODE_FOR_andsi3_sign_extendqi:
    case CODE_FOR_iorsi3_sign_extendqi:
    case CODE_FOR_xorsi3_sign_extendqi:
    case CODE_FOR_divsi3_sign_extendqi:
    case CODE_FOR_udivsi3_sign_extendqi:
    case CODE_FOR_minussi3_sign_extendqi:
    case CODE_FOR_smaxsi3_sign_extendqi:
    case CODE_FOR_sminsi3_sign_extendqi:
    case CODE_FOR_multsi3_sign_extendqi:
    case CODE_FOR_comparesi3_sign_extendqi:
      zero = false;
      factor = 1;
      break;
    }

  /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))).  */
  extend = single_set (insn);
  gcc_assert (extend != NULL_RTX);

  /* The extend may be either operand of the (commutative or
     comparison) operation.  */
  extend = SET_SRC (extend);
  if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
      || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
    extend = XEXP (extend, 0);
  else
    extend = XEXP (extend, 1);

  gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
	      || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));

  mem = XEXP (extend, 0);
  gcc_checking_assert (MEM_P (mem));
  if (REG_P (XEXP (mem, 0)))
    /* Plain [REG] addressing: shortest encoding.  */
    return (zero && factor == 1) ? 2 : 3;

  /* We are expecting: (MEM (PLUS (REG) (CONST_INT))).  */
  gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
  gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));

  offset = XEXP (XEXP (mem, 0), 1);
  gcc_checking_assert (GET_CODE (offset) == CONST_INT);

  /* Small (scaled byte) displacements get the medium encoding; larger
     displacements need one more byte.  */
  if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
    return (zero && factor == 1) ? 3 : 4;

  return (zero && factor == 1) ? 4 : 5;
}
/* Implement TARGET_NARROW_VOLATILE_BITFIELD: always returns true,
   i.e. volatile bitfield accesses may be narrowed on this target (see
   the TARGET_NARROW_VOLATILE_BITFIELD hook documentation).  */

static bool
rx_narrow_volatile_bitfield (void)
{
  return true;
}
3372 static bool
3373 rx_ok_to_inline (tree caller, tree callee)
3375 /* Do not inline functions with local variables
3376 into a naked CALLER - naked function have no stack frame and
3377 locals need a frame in order to have somewhere to live.
3379 Unfortunately we have no way to determine the presence of
3380 local variables in CALLEE, so we have to be cautious and
3381 assume that there might be some there.
3383 We do allow inlining when CALLEE has the "inline" type
3384 modifier or the "always_inline" or "gnu_inline" attributes. */
3385 return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
3386 || DECL_DECLARED_INLINE_P (callee)
3387 || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
3388 || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
3391 static bool
3392 rx_enable_lra (void)
3394 return TARGET_ENABLE_LRA;
/* Initialize the GCC target structure: route the generic target hooks
   to the RX implementations in this file (and its other sections).
   TARGET_INITIALIZER below expands using these macros.  */

#undef  TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD	rx_narrow_volatile_bitfield

#undef  TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P			rx_ok_to_inline

/* All four alignment-skip hooks share one heuristic.  */
#undef  TARGET_ASM_JUMP_ALIGN_MAX_SKIP
#define TARGET_ASM_JUMP_ALIGN_MAX_SKIP			rx_max_skip_for_label
#undef  TARGET_ASM_LOOP_ALIGN_MAX_SKIP
#define TARGET_ASM_LOOP_ALIGN_MAX_SKIP			rx_max_skip_for_label
#undef  TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
#define TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP	rx_max_skip_for_label
#undef  TARGET_ASM_LABEL_ALIGN_MAX_SKIP
#define TARGET_ASM_LABEL_ALIGN_MAX_SKIP			rx_max_skip_for_label

#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE		rx_function_value

#undef  TARGET_RETURN_IN_MSB
#define TARGET_RETURN_IN_MSB		rx_return_in_msb

#undef  TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P		rx_in_small_data

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY		rx_return_in_memory

#undef  TARGET_HAVE_SRODATA_SECTION
#define TARGET_HAVE_SRODATA_SECTION	true

#undef	TARGET_ASM_SELECT_RTX_SECTION
#define	TARGET_ASM_SELECT_RTX_SECTION	rx_select_rtx_section

#undef	TARGET_ASM_SELECT_SECTION
#define	TARGET_ASM_SELECT_SECTION	rx_select_section

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS		rx_init_builtins

#undef  TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL		rx_builtin_decl

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN		rx_expand_builtin

#undef  TARGET_ASM_CONSTRUCTOR
#define TARGET_ASM_CONSTRUCTOR		rx_elf_asm_constructor

#undef  TARGET_ASM_DESTRUCTOR
#define TARGET_ASM_DESTRUCTOR		rx_elf_asm_destructor

#undef  TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX		rx_struct_value_rtx

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE		rx_attribute_table

#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START			rx_file_start

#undef  TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P		rx_is_ms_bitfield_layout

#undef  TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P		rx_is_legitimate_address

#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P		rx_mode_dependent_address_p

#undef  TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS	rx_allocate_stack_slots_for_args

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE		rx_output_function_prologue

#undef  TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P	rx_func_attr_inlinable

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL		rx_function_ok_for_sibcall

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG		rx_function_arg

#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE	rx_function_arg_advance

#undef	TARGET_FUNCTION_ARG_BOUNDARY
#define	TARGET_FUNCTION_ARG_BOUNDARY	rx_function_arg_boundary

#undef  TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION	rx_set_current_function

#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER		rx_assemble_integer

#undef  TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P	hook_bool_mode_const_rtx_true

#undef  TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET	32

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST		rx_address_cost

#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE		rx_can_eliminate

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE	rx_conditional_register_usage

#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE	rx_trampoline_template

#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT		rx_trampoline_init

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND		rx_print_operand

#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS	rx_print_operand_address

#undef  TARGET_CC_MODES_COMPATIBLE
#define TARGET_CC_MODES_COMPATIBLE	rx_cc_modes_compatible

#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST		rx_memory_move_cost

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE		rx_option_override

#undef  TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE	rx_promote_function_mode

#undef  TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
#define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE	rx_override_options_after_change

#undef  TARGET_FLAGS_REGNUM
#define TARGET_FLAGS_REGNUM		CC_REG

#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P	rx_is_legitimate_constant

#undef  TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS	rx_legitimize_address

#undef  TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN		rx_warn_func_return

#undef  TARGET_LRA_P
#define TARGET_LRA_P			rx_enable_lra

/* The one-and-only definition of the target hook vector.  */
struct gcc_target targetm = TARGET_INITIALIZER;
3553 #include "gt-rx.h"