1 /* Subroutines used for code generation on Renesas RX processors.
2 Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
3 Contributed by Red Hat.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* To Do:
23 * Re-enable memory-to-memory copies and fix up reload. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "tree.h"
30 #include "rtl.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
35 #include "output.h"
36 #include "insn-attr.h"
37 #include "flags.h"
38 #include "function.h"
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "toplev.h"
44 #include "reload.h"
45 #include "df.h"
46 #include "ggc.h"
47 #include "tm_p.h"
48 #include "debug.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 enum rx_cpu_types rx_cpu_type = RX600;
55 /* Return true if OP is a reference to an object in a small data area. */
57 static bool
58 rx_small_data_operand (rtx op)
60 if (rx_small_data_limit == 0)
61 return false;
63 if (GET_CODE (op) == SYMBOL_REF)
64 return SYMBOL_REF_SMALL_P (op);
66 return false;
69 static bool
70 rx_is_legitimate_address (Mmode mode, rtx x, bool strict ATTRIBUTE_UNUSED)
72 if (RTX_OK_FOR_BASE (x, strict))
73 /* Register Indirect. */
74 return true;
76 if (GET_MODE_SIZE (mode) == 4
77 && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
78 /* Pre-decrement Register Indirect or
79 Post-increment Register Indirect. */
80 return RTX_OK_FOR_BASE (XEXP (x, 0), strict);
82 if (GET_CODE (x) == PLUS)
84 rtx arg1 = XEXP (x, 0);
85 rtx arg2 = XEXP (x, 1);
86 rtx index = NULL_RTX;
88 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
89 index = arg2;
90 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
91 index = arg1;
92 else
93 return false;
95 switch (GET_CODE (index))
97 case CONST_INT:
99 /* Register Relative: REG + INT.
100 Only positive, mode-aligned, mode-sized
101 displacements are allowed. */
102 HOST_WIDE_INT val = INTVAL (index);
103 int factor;
105 if (val < 0)
106 return false;
108 switch (GET_MODE_SIZE (mode))
110 default:
111 case 4: factor = 4; break;
112 case 2: factor = 2; break;
113 case 1: factor = 1; break;
116 if (val > (65535 * factor))
117 return false;
118 return (val % factor) == 0;
121 case REG:
122 /* Unscaled Indexed Register Indirect: REG + REG
123 Size has to be "QI", REG has to be valid. */
124 return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);
126 case MULT:
128 /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
129 Factor has to equal the mode size, REG has to be valid. */
130 rtx factor;
132 factor = XEXP (index, 1);
133 index = XEXP (index, 0);
135 return REG_P (index)
136 && RTX_OK_FOR_BASE (index, strict)
137 && CONST_INT_P (factor)
138 && GET_MODE_SIZE (mode) == INTVAL (factor);
141 default:
142 return false;
146 /* Small data area accesses turn into register relative offsets. */
147 return rx_small_data_operand (x);
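/* Illustrative sketch (not taken from the RX documentation): for an SImode
   access the checks above accept, for example,

     (reg r1)                                      register indirect
     (plus (reg r1) (const_int 8))                 register relative - the
                                                   displacement is positive,
                                                   a multiple of 4 and no
                                                   larger than 65535 * 4
     (plus (reg r1) (mult (reg r2) (const_int 4)))
                                                   scaled indexed - the scale
                                                   matches the access size

   but reject (plus (reg r1) (const_int 6)) because the displacement is not
   a multiple of the access size, and reject unscaled REG + REG for anything
   other than QImode.  */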
 150 /* Returns TRUE for simple memory addresses, i.e. ones
 151    that do not involve register indexed addressing
152 or pre/post increment/decrement. */
154 bool
155 rx_is_restricted_memory_address (rtx mem, enum machine_mode mode)
157 rtx base, index;
159 if (! rx_is_legitimate_address
160 (mode, mem, reload_in_progress || reload_completed))
161 return false;
163 switch (GET_CODE (mem))
165 case REG:
166 /* Simple memory addresses are OK. */
167 return true;
169 case PRE_DEC:
170 case POST_INC:
171 return false;
173 case PLUS:
174 /* Only allow REG+INT addressing. */
175 base = XEXP (mem, 0);
176 index = XEXP (mem, 1);
178 return RX_REG_P (base) && CONST_INT_P (index);
180 case SYMBOL_REF:
181 /* Can happen when small data is being supported.
182 Assume that it will be resolved into GP+INT. */
183 return true;
185 default:
186 gcc_unreachable ();
190 bool
191 rx_is_mode_dependent_addr (rtx addr)
193 if (GET_CODE (addr) == CONST)
194 addr = XEXP (addr, 0);
196 switch (GET_CODE (addr))
198 /* --REG and REG++ only work in SImode. */
199 case PRE_DEC:
200 case POST_INC:
201 return true;
203 case MINUS:
204 case PLUS:
205 if (! REG_P (XEXP (addr, 0)))
206 return true;
208 addr = XEXP (addr, 1);
210 switch (GET_CODE (addr))
212 case REG:
213 /* REG+REG only works in SImode. */
214 return true;
216 case CONST_INT:
217 /* REG+INT is only mode independent if INT is a
218 multiple of 4, positive and will fit into 8-bits. */
219 if (((INTVAL (addr) & 3) == 0)
220 && IN_RANGE (INTVAL (addr), 4, 252))
221 return false;
222 return true;
224 case SYMBOL_REF:
225 case LABEL_REF:
226 return true;
228 case MULT:
229 gcc_assert (REG_P (XEXP (addr, 0)));
230 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
231 /* REG+REG*SCALE is always mode dependent. */
232 return true;
234 default:
235 /* Not recognized, so treat as mode dependent. */
236 return true;
239 case CONST_INT:
240 case SYMBOL_REF:
241 case LABEL_REF:
242 case REG:
243 /* These are all mode independent. */
244 return false;
246 default:
247 /* Everything else is unrecognized,
248 so treat as mode dependent. */
249 return true;
253 /* A C compound statement to output to stdio stream FILE the
254 assembler syntax for an instruction operand that is a memory
255 reference whose address is ADDR. */
257 void
258 rx_print_operand_address (FILE * file, rtx addr)
260 switch (GET_CODE (addr))
262 case REG:
263 fprintf (file, "[");
264 rx_print_operand (file, addr, 0);
265 fprintf (file, "]");
266 break;
268 case PRE_DEC:
269 fprintf (file, "[-");
270 rx_print_operand (file, XEXP (addr, 0), 0);
271 fprintf (file, "]");
272 break;
274 case POST_INC:
275 fprintf (file, "[");
276 rx_print_operand (file, XEXP (addr, 0), 0);
277 fprintf (file, "+]");
278 break;
280 case PLUS:
282 rtx arg1 = XEXP (addr, 0);
283 rtx arg2 = XEXP (addr, 1);
284 rtx base, index;
286 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
287 base = arg1, index = arg2;
288 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
289 base = arg2, index = arg1;
290 else
292 rx_print_operand (file, arg1, 0);
293 fprintf (file, " + ");
294 rx_print_operand (file, arg2, 0);
295 break;
298 if (REG_P (index) || GET_CODE (index) == MULT)
300 fprintf (file, "[");
301 rx_print_operand (file, index, 'A');
302 fprintf (file, ",");
304 else /* GET_CODE (index) == CONST_INT */
306 rx_print_operand (file, index, 'A');
307 fprintf (file, "[");
309 rx_print_operand (file, base, 0);
310 fprintf (file, "]");
311 break;
314 case LABEL_REF:
315 case SYMBOL_REF:
316 case CONST:
317 fprintf (file, "#");
318 default:
319 output_addr_const (file, addr);
320 break;
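/* A sketch of the syntax emitted above, derived from the format strings and
   intended only as illustration: a plain register prints as "[r1]",
   pre-decrement as "[-r1]", post-increment as "[r1+]", a register relative
   address such as (plus (reg r1) (const_int 8)) as "8[r1]", and an indexed
   address such as (plus (reg r1) (mult (reg r2) (const_int 4))) as
   "[r2,r1]".  */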
324 static void
325 rx_print_integer (FILE * file, HOST_WIDE_INT val)
327 if (IN_RANGE (val, -64, 64))
328 fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
329 else
330 fprintf (file,
331 TARGET_AS100_SYNTAX
332 ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
333 val);
336 static bool
337 rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
339 const char * op = integer_asm_op (size, is_aligned);
341 if (! CONST_INT_P (x))
342 return default_assemble_integer (x, size, is_aligned);
344 if (op == NULL)
345 return false;
346 fputs (op, asm_out_file);
348 rx_print_integer (asm_out_file, INTVAL (x));
349 fputc ('\n', asm_out_file);
350 return true;
354 int rx_float_compare_mode;
356 /* Handles the insertion of a single operand into the assembler output.
357 The %<letter> directives supported are:
359 %A Print an operand without a leading # character.
360 %B Print an integer comparison name.
361 %C Print a control register name.
362 %F Print a condition code flag name.
363 %H Print high part of a DImode register, integer or address.
364 %L Print low part of a DImode register, integer or address.
365 %Q If the operand is a MEM, then correctly generate
366 register indirect or register relative addressing. */
368 void
369 rx_print_operand (FILE * file, rtx op, int letter)
371 switch (letter)
373 case 'A':
374 /* Print an operand without a leading #. */
375 if (MEM_P (op))
376 op = XEXP (op, 0);
378 switch (GET_CODE (op))
380 case LABEL_REF:
381 case SYMBOL_REF:
382 output_addr_const (file, op);
383 break;
384 case CONST_INT:
385 fprintf (file, "%ld", (long) INTVAL (op));
386 break;
387 default:
388 rx_print_operand (file, op, 0);
389 break;
391 break;
393 case 'B':
394 switch (GET_CODE (op))
396 case LT: fprintf (file, "lt"); break;
397 case GE: fprintf (file, "ge"); break;
398 case GT: fprintf (file, "gt"); break;
399 case LE: fprintf (file, "le"); break;
400 case GEU: fprintf (file, "geu"); break;
401 case LTU: fprintf (file, "ltu"); break;
402 case GTU: fprintf (file, "gtu"); break;
403 case LEU: fprintf (file, "leu"); break;
404 case EQ: fprintf (file, "eq"); break;
405 case NE: fprintf (file, "ne"); break;
406 default: debug_rtx (op); gcc_unreachable ();
408 break;
410 case 'C':
411 gcc_assert (CONST_INT_P (op));
412 switch (INTVAL (op))
414 case 0: fprintf (file, "psw"); break;
415 case 2: fprintf (file, "usp"); break;
416 case 3: fprintf (file, "fpsw"); break;
417 case 4: fprintf (file, "cpen"); break;
418 case 8: fprintf (file, "bpsw"); break;
419 case 9: fprintf (file, "bpc"); break;
420 case 0xa: fprintf (file, "isp"); break;
421 case 0xb: fprintf (file, "fintv"); break;
422 case 0xc: fprintf (file, "intb"); break;
423 default:
424 warning (0, "unreocgnized control register number: %d - using 'psw'",
 425                (int) INTVAL (op));
426 fprintf (file, "psw");
427 break;
429 break;
431 case 'F':
432 gcc_assert (CONST_INT_P (op));
433 switch (INTVAL (op))
435 case 0: case 'c': case 'C': fprintf (file, "C"); break;
436 case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
437 case 2: case 's': case 'S': fprintf (file, "S"); break;
438 case 3: case 'o': case 'O': fprintf (file, "O"); break;
439 case 8: case 'i': case 'I': fprintf (file, "I"); break;
440 case 9: case 'u': case 'U': fprintf (file, "U"); break;
441 default:
442 gcc_unreachable ();
444 break;
446 case 'H':
447 if (REG_P (op))
448 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
449 else if (CONST_INT_P (op))
451 HOST_WIDE_INT v = INTVAL (op);
453 fprintf (file, "#");
454 /* Trickery to avoid problems with shifting 32 bits at a time. */
455 v = v >> 16;
456 v = v >> 16;
457 rx_print_integer (file, v);
459 else
461 gcc_assert (MEM_P (op));
463 if (! WORDS_BIG_ENDIAN)
464 op = adjust_address (op, SImode, 4);
465 output_address (XEXP (op, 0));
467 break;
469 case 'L':
470 if (REG_P (op))
471 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
472 else if (CONST_INT_P (op))
474 fprintf (file, "#");
475 rx_print_integer (file, INTVAL (op) & 0xffffffff);
477 else
479 gcc_assert (MEM_P (op));
481 if (WORDS_BIG_ENDIAN)
482 op = adjust_address (op, SImode, 4);
483 output_address (XEXP (op, 0));
485 break;
487 case 'Q':
488 if (MEM_P (op))
490 HOST_WIDE_INT offset;
492 op = XEXP (op, 0);
494 if (REG_P (op))
495 offset = 0;
496 else if (GET_CODE (op) == PLUS)
498 rtx displacement;
500 if (REG_P (XEXP (op, 0)))
502 displacement = XEXP (op, 1);
503 op = XEXP (op, 0);
505 else
507 displacement = XEXP (op, 0);
508 op = XEXP (op, 1);
509 gcc_assert (REG_P (op));
512 gcc_assert (CONST_INT_P (displacement));
513 offset = INTVAL (displacement);
514 gcc_assert (offset >= 0);
516 fprintf (file, "%ld", offset);
518 else
519 gcc_unreachable ();
521 fprintf (file, "[");
522 rx_print_operand (file, op, 0);
523 fprintf (file, "].");
525 switch (GET_MODE_SIZE (GET_MODE (op)))
527 case 1:
528 gcc_assert (offset < 65535 * 1);
529 fprintf (file, "B");
530 break;
531 case 2:
532 gcc_assert (offset % 2 == 0);
533 gcc_assert (offset < 65535 * 2);
534 fprintf (file, "W");
535 break;
536 default:
537 gcc_assert (offset % 4 == 0);
538 gcc_assert (offset < 65535 * 4);
539 fprintf (file, "L");
540 break;
542 break;
545 /* Fall through. */
547 default:
548 switch (GET_CODE (op))
550 case MULT:
551 /* Should be the scaled part of an
552 indexed register indirect address. */
554 rtx base = XEXP (op, 0);
555 rtx index = XEXP (op, 1);
 557       /* Check for a swapped index register and scaling factor.
558 Not sure if this can happen, but be prepared to handle it. */
559 if (CONST_INT_P (base) && REG_P (index))
561 rtx tmp = base;
562 base = index;
563 index = tmp;
566 gcc_assert (REG_P (base));
567 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
568 gcc_assert (CONST_INT_P (index));
569 /* Do not try to verify the value of the scalar as it is based
570 on the mode of the MEM not the mode of the MULT. (Which
571 will always be SImode). */
572 fprintf (file, "%s", reg_names [REGNO (base)]);
573 break;
576 case MEM:
577 output_address (XEXP (op, 0));
578 break;
580 case PLUS:
581 output_address (op);
582 break;
584 case REG:
585 gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
586 fprintf (file, "%s", reg_names [REGNO (op)]);
587 break;
589 case SUBREG:
590 gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
591 fprintf (file, "%s", reg_names [subreg_regno (op)]);
592 break;
594 /* This will only be single precision.... */
595 case CONST_DOUBLE:
597 unsigned long val;
598 REAL_VALUE_TYPE rv;
600 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
601 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
602 fprintf (file, TARGET_AS100_SYNTAX ? "#0%lxH" : "#0x%lx", val);
603 break;
606 case CONST_INT:
607 fprintf (file, "#");
608 rx_print_integer (file, INTVAL (op));
609 break;
611 case SYMBOL_REF:
612 case CONST:
613 case LABEL_REF:
614 case CODE_LABEL:
615 case UNSPEC:
616 rx_print_operand_address (file, op);
617 break;
619 default:
620 gcc_unreachable ();
622 break;
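/* A hedged example of the %H/%L pair (register numbers assumed): for a
   DImode value held in the register pair starting at r1 with little-endian
   word order, "%H" prints "r2" (REGNO + 1 holds the high word) and "%L"
   prints "r1".  For a CONST_INT, %H prints the value shifted right by 32
   (done in two 16-bit steps) and %L prints the value masked with
   0xffffffff, both with a leading #.  */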
626 /* Returns an assembler template for a move instruction. */
628 char *
629 rx_gen_move_template (rtx * operands, bool is_movu)
631 static char template [64];
632 const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
633 const char * src_template;
634 const char * dst_template;
635 rtx dest = operands[0];
636 rtx src = operands[1];
638 /* Decide which extension, if any, should be given to the move instruction. */
639 switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
641 case QImode:
642 /* The .B extension is not valid when
643 loading an immediate into a register. */
644 if (! REG_P (dest) || ! CONST_INT_P (src))
645 extension = ".B";
646 break;
647 case HImode:
648 if (! REG_P (dest) || ! CONST_INT_P (src))
649 /* The .W extension is not valid when
650 loading an immediate into a register. */
651 extension = ".W";
652 break;
653 case SFmode:
654 case SImode:
655 extension = ".L";
656 break;
657 case VOIDmode:
658 /* This mode is used by constants. */
659 break;
660 default:
661 debug_rtx (src);
662 gcc_unreachable ();
665 if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
666 src_template = "%%gp(%A1)[r13]";
667 else
668 src_template = "%1";
670 if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
671 dst_template = "%%gp(%A0)[r13]";
672 else
673 dst_template = "%0";
675 sprintf (template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
676 extension, src_template, dst_template);
677 return template;
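/* A hedged illustration of the templates built above: moving an SImode
   register to memory produces "mov.L\t%1, %0", a zero-extending HImode load
   produces "movu.W\t%1, %0", and an operand that refers to a small data
   symbol is rewritten as "%gp(%A1)[r13]" (or "%gp(%A0)[r13]" for the
   destination) so that it is addressed relative to the small data base
   register r13.  */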
680 /* Returns an assembler template for a conditional branch instruction. */
682 const char *
683 rx_gen_cond_branch_template (rtx condition, bool reversed)
685 enum rtx_code code = GET_CODE (condition);
688 if ((cc_status.flags & CC_NO_OVERFLOW) && ! rx_float_compare_mode)
689 gcc_assert (code != GT && code != GE && code != LE && code != LT);
691 if ((cc_status.flags & CC_NO_CARRY) || rx_float_compare_mode)
692 gcc_assert (code != GEU && code != GTU && code != LEU && code != LTU);
694 if (reversed)
696 if (rx_float_compare_mode)
697 code = reverse_condition_maybe_unordered (code);
698 else
699 code = reverse_condition (code);
702 /* We do not worry about encoding the branch length here as GAS knows
703 how to choose the smallest version, and how to expand a branch that
704 is to a destination that is out of range. */
706 switch (code)
708 case UNEQ: return "bo\t1f\n\tbeq\t%0\n1:";
709 case LTGT: return "bo\t1f\n\tbne\t%0\n1:";
710 case UNLT: return "bo\t1f\n\tbn\t%0\n1:";
711 case UNGE: return "bo\t1f\n\tbpz\t%0\n1:";
712 case UNLE: return "bo\t1f\n\tbgt\t1f\n\tbra\t%0\n1:";
713 case UNGT: return "bo\t1f\n\tble\t1f\n\tbra\t%0\n1:";
714 case UNORDERED: return "bo\t%0";
715 case ORDERED: return "bno\t%0";
717 case LT: return rx_float_compare_mode ? "bn\t%0" : "blt\t%0";
718 case GE: return rx_float_compare_mode ? "bpz\t%0" : "bge\t%0";
719 case GT: return "bgt\t%0";
720 case LE: return "ble\t%0";
721 case GEU: return "bgeu\t%0";
722 case LTU: return "bltu\t%0";
723 case GTU: return "bgtu\t%0";
724 case LEU: return "bleu\t%0";
725 case EQ: return "beq\t%0";
726 case NE: return "bne\t%0";
727 default:
728 gcc_unreachable ();
732 /* Return VALUE rounded up to the next ALIGNMENT boundary. */
734 static inline unsigned int
735 rx_round_up (unsigned int value, unsigned int alignment)
737 alignment -= 1;
738 return (value + alignment) & (~ alignment);
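/* Worked example: rx_round_up (13, 4) computes (13 + 3) & ~3 == 16, and
   rx_round_up (16, 4) stays at 16.  ALIGNMENT is assumed to be a power of
   two.  */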
741 /* Return the number of bytes in the argument registers
742 occupied by an argument of type TYPE and mode MODE. */
744 unsigned int
745 rx_function_arg_size (Mmode mode, const_tree type)
747 unsigned int num_bytes;
749 num_bytes = (mode == BLKmode)
750 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
751 return rx_round_up (num_bytes, UNITS_PER_WORD);
754 #define NUM_ARG_REGS 4
755 #define MAX_NUM_ARG_BYTES (NUM_ARG_REGS * UNITS_PER_WORD)
757 /* Return an RTL expression describing the register holding a function
758 parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
759 be passed on the stack. CUM describes the previous parameters to the
760 function and NAMED is false if the parameter is part of a variable
761 parameter list, or the last named parameter before the start of a
762 variable parameter list. */
 764 rtx
 765 rx_function_arg (Fargs * cum, Mmode mode, const_tree type, bool named)
767 unsigned int next_reg;
768 unsigned int bytes_so_far = *cum;
769 unsigned int size;
770 unsigned int rounded_size;
772 /* An exploded version of rx_function_arg_size. */
773 size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
775 rounded_size = rx_round_up (size, UNITS_PER_WORD);
777 /* Don't pass this arg via registers if there
778 are insufficient registers to hold all of it. */
779 if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
780 return NULL_RTX;
782 /* Unnamed arguments and the last named argument in a
783 variadic function are always passed on the stack. */
784 if (!named)
785 return NULL_RTX;
787 /* Structures must occupy an exact number of registers,
788 otherwise they are passed on the stack. */
789 if ((type == NULL || AGGREGATE_TYPE_P (type))
790 && (size % UNITS_PER_WORD) != 0)
791 return NULL_RTX;
793 next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;
795 return gen_rtx_REG (mode, next_reg);
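/* A sketch of the assignment above (values chosen for illustration): with
   no argument bytes used so far, an SImode argument is placed in r1
   (bytes_so_far / UNITS_PER_WORD + 1 == 1); after 8 bytes of earlier
   arguments the next one goes in r3.  A 12-byte structure offered when 8
   bytes are already in use is returned as NULL_RTX because 12 + 8 exceeds
   the 16 bytes available in r1-r4, so it is passed on the stack.  */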
798 /* Return an RTL describing where a function return value of type RET_TYPE
799 is held. */
801 static rtx
802 rx_function_value (const_tree ret_type,
803 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
804 bool outgoing ATTRIBUTE_UNUSED)
806 return gen_rtx_REG (TYPE_MODE (ret_type), FUNC_RETURN_REGNUM);
809 static bool
810 rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
812 HOST_WIDE_INT size;
814 if (TYPE_MODE (type) != BLKmode
815 && ! AGGREGATE_TYPE_P (type))
816 return false;
818 size = int_size_in_bytes (type);
819 /* Large structs and those whose size is not an
820 exact multiple of 4 are returned in memory. */
821 return size < 1
822 || size > 16
823 || (size % UNITS_PER_WORD) != 0;
826 static rtx
827 rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
828 int incoming ATTRIBUTE_UNUSED)
830 return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
833 static bool
834 rx_return_in_msb (const_tree valtype)
836 return TARGET_BIG_ENDIAN_DATA
837 && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
840 /* Returns true if the provided function has the specified attribute. */
842 static inline bool
843 has_func_attr (const_tree decl, const char * func_attr)
845 if (decl == NULL_TREE)
846 decl = current_function_decl;
848 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
851 /* Returns true if the provided function has the "fast_interrupt" attribute. */
853 static inline bool
854 is_fast_interrupt_func (const_tree decl)
856 return has_func_attr (decl, "fast_interrupt");
859 /* Returns true if the provided function has the "interrupt" attribute. */
861 static inline bool
862 is_interrupt_func (const_tree decl)
864 return has_func_attr (decl, "interrupt");
867 /* Returns true if the provided function has the "naked" attribute. */
869 static inline bool
870 is_naked_func (const_tree decl)
872 return has_func_attr (decl, "naked");
875 static bool use_fixed_regs = false;
877 void
878 rx_conditional_register_usage (void)
880 static bool using_fixed_regs = false;
882 if (rx_small_data_limit > 0)
883 fixed_regs[GP_BASE_REGNUM] = call_used_regs [GP_BASE_REGNUM] = 1;
885 if (use_fixed_regs != using_fixed_regs)
887 static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
888 static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
890 if (use_fixed_regs)
892 unsigned int r;
894 memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
895 memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);
897 /* This is for fast interrupt handlers. Any register in
898 the range r10 to r13 (inclusive) that is currently
899 marked as fixed is now a viable, call-used register. */
900 for (r = 10; r <= 13; r++)
901 if (fixed_regs[r])
903 fixed_regs[r] = 0;
904 call_used_regs[r] = 1;
907 /* Mark r7 as fixed. This is just a hack to avoid
908 altering the reg_alloc_order array so that the newly
909 freed r10-r13 registers are the preferred registers. */
910 fixed_regs[7] = call_used_regs[7] = 1;
912 else
914 /* Restore the normal register masks. */
915 memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
916 memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
919 using_fixed_regs = use_fixed_regs;
923 /* Perform any actions necessary before starting to compile FNDECL.
924 For the RX we use this to make sure that we have the correct
925 set of register masks selected. If FNDECL is NULL then we are
926 compiling top level things. */
928 static void
929 rx_set_current_function (tree fndecl)
931 /* Remember the last target of rx_set_current_function. */
932 static tree rx_previous_fndecl;
933 bool prev_was_fast_interrupt;
934 bool current_is_fast_interrupt;
936 /* Only change the context if the function changes. This hook is called
937 several times in the course of compiling a function, and we don't want
938 to slow things down too much or call target_reinit when it isn't safe. */
939 if (fndecl == rx_previous_fndecl)
940 return;
942 prev_was_fast_interrupt
943 = rx_previous_fndecl
944 ? is_fast_interrupt_func (rx_previous_fndecl) : false;
946 current_is_fast_interrupt
947 = fndecl ? is_fast_interrupt_func (fndecl) : false;
949 if (prev_was_fast_interrupt != current_is_fast_interrupt)
951 use_fixed_regs = current_is_fast_interrupt;
952 target_reinit ();
955 rx_previous_fndecl = fndecl;
 958 /* Typical stack layout should look like this after the function's prologue:
961 -- ^
962 | | \ |
963 | | arguments saved | Increasing
964 | | on the stack | addresses
965 PARENT arg pointer -> | | /
966 -------------------------- ---- -------------------
967 CHILD |ret | return address
969 | | \
970 | | call saved
971 | | registers
972 | | /
974 | | \
975 | | local
976 | | variables
977 frame pointer -> | | /
979 | | \
980 | | outgoing | Decreasing
981 | | arguments | addresses
982 current stack pointer -> | | / |
983 -------------------------- ---- ------------------ V
984 | | */
986 static unsigned int
987 bit_count (unsigned int x)
989 const unsigned int m1 = 0x55555555;
990 const unsigned int m2 = 0x33333333;
991 const unsigned int m4 = 0x0f0f0f0f;
993 x -= (x >> 1) & m1;
994 x = (x & m2) + ((x >> 2) & m2);
995 x = (x + (x >> 4)) & m4;
996 x += x >> 8;
998 return (x + (x >> 16)) & 0x3f;
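/* This is the standard SWAR population count.  For example
   bit_count (0xF0F0) proceeds 0xF0F0 -> 0xA0A0 -> 0x4040 -> 0x0404 ->
   0x0408 -> 8, i.e. eight set bits.  */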
1001 #define MUST_SAVE_ACC_REGISTER \
1002 (TARGET_SAVE_ACC_REGISTER \
1003 && (is_interrupt_func (NULL_TREE) \
1004 || is_fast_interrupt_func (NULL_TREE)))
1006 /* Returns either the lowest numbered and highest numbered registers that
1007 occupy the call-saved area of the stack frame, if the registers are
1008 stored as a contiguous block, or else a bitmask of the individual
1009 registers if they are stored piecemeal.
1011 Also computes the size of the frame and the size of the outgoing
1012 arguments block (in bytes). */
1014 static void
1015 rx_get_stack_layout (unsigned int * lowest,
1016 unsigned int * highest,
1017 unsigned int * register_mask,
1018 unsigned int * frame_size,
1019 unsigned int * stack_size)
1021 unsigned int reg;
1022 unsigned int low;
1023 unsigned int high;
1024 unsigned int fixed_reg = 0;
1025 unsigned int save_mask;
1026 unsigned int pushed_mask;
1027 unsigned int unneeded_pushes;
1029 if (is_naked_func (NULL_TREE))
1031 /* Naked functions do not create their own stack frame.
1032 Instead the programmer must do that for us. */
1033 * lowest = 0;
1034 * highest = 0;
1035 * register_mask = 0;
1036 * frame_size = 0;
1037 * stack_size = 0;
1038 return;
1041 for (save_mask = high = low = 0, reg = 1; reg < FIRST_PSEUDO_REGISTER; reg++)
1043 if (df_regs_ever_live_p (reg)
1044 && (! call_used_regs[reg]
 1045            /* Even call clobbered registers must
1046 be pushed inside interrupt handlers. */
1047 || is_interrupt_func (NULL_TREE)
1048 /* Likewise for fast interrupt handlers, except registers r10 -
1049 r13. These are normally call-saved, but may have been set
1050 to call-used by rx_conditional_register_usage. If so then
1051 they can be used in the fast interrupt handler without
1052 saving them on the stack. */
1053 || (is_fast_interrupt_func (NULL_TREE)
1054 && ! IN_RANGE (reg, 10, 13))))
1056 if (low == 0)
1057 low = reg;
1058 high = reg;
1060 save_mask |= 1 << reg;
1063 /* Remember if we see a fixed register
1064 after having found the low register. */
1065 if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
1066 fixed_reg = reg;
1069 /* If we have to save the accumulator register, make sure
1070 that at least two registers are pushed into the frame. */
1071 if (MUST_SAVE_ACC_REGISTER
1072 && bit_count (save_mask) < 2)
1074 save_mask |= (1 << 13) | (1 << 14);
1075 if (low == 0)
1076 low = 13;
1077 if (high == 0 || low == high)
1078 high = low + 1;
1081 /* Decide if it would be faster fill in the call-saved area of the stack
1082 frame using multiple PUSH instructions instead of a single PUSHM
1083 instruction.
1085 SAVE_MASK is a bitmask of the registers that must be stored in the
1086 call-save area. PUSHED_MASK is a bitmask of the registers that would
1087 be pushed into the area if we used a PUSHM instruction. UNNEEDED_PUSHES
1088 is a bitmask of those registers in pushed_mask that are not in
1089 save_mask.
1091 We use a simple heuristic that says that it is better to use
1092 multiple PUSH instructions if the number of unnecessary pushes is
1093 greater than the number of necessary pushes.
1095 We also use multiple PUSH instructions if there are any fixed registers
1096 between LOW and HIGH. The only way that this can happen is if the user
 1097      has specified -ffixed-<reg-name> on the command line and in such
1098 circumstances we do not want to touch the fixed registers at all.
1100 FIXME: Is it worth improving this heuristic ? */
1101 pushed_mask = (-1 << low) & ~(-1 << (high + 1));
1102 unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
1104 if ((fixed_reg && fixed_reg <= high)
1105 || (optimize_function_for_speed_p (cfun)
1106 && bit_count (save_mask) < bit_count (unneeded_pushes)))
1108 /* Use multiple pushes. */
1109 * lowest = 0;
1110 * highest = 0;
1111 * register_mask = save_mask;
1113 else
1115 /* Use one push multiple instruction. */
1116 * lowest = low;
1117 * highest = high;
1118 * register_mask = 0;
1121 * frame_size = rx_round_up
1122 (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);
1124 if (crtl->args.size > 0)
1125 * frame_size += rx_round_up
1126 (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);
1128 * stack_size = rx_round_up
1129 (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
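/* Worked example of the PUSH/PUSHM heuristic above (register numbers chosen
   for illustration): if only r6 and r10 must be saved then low == 6,
   high == 10, pushed_mask covers r6-r10 and unneeded_pushes contains r7, r8
   and r9.  When optimizing for speed, two necessary pushes against three
   unnecessary ones makes the code return the individual register_mask
   instead of a low/high range, so the prologue uses separate PUSH
   instructions.  */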
1132 /* Generate a PUSHM instruction that matches the given operands. */
1134 void
1135 rx_emit_stack_pushm (rtx * operands)
1137 HOST_WIDE_INT last_reg;
1138 rtx first_push;
1140 gcc_assert (CONST_INT_P (operands[0]));
1141 last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;
1143 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1144 first_push = XVECEXP (operands[1], 0, 1);
1145 gcc_assert (SET_P (first_push));
1146 first_push = SET_SRC (first_push);
1147 gcc_assert (REG_P (first_push));
1149 asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
1150 reg_names [REGNO (first_push) - last_reg],
1151 reg_names [REGNO (first_push)]);
1154 /* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate. */
1156 static rtx
1157 gen_rx_store_vector (unsigned int low, unsigned int high)
1159 unsigned int i;
1160 unsigned int count = (high - low) + 2;
1161 rtx vector;
1163 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1165 XVECEXP (vector, 0, 0) =
1166 gen_rtx_SET (SImode, stack_pointer_rtx,
1167 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1168 GEN_INT ((count - 1) * UNITS_PER_WORD)));
1170 for (i = 0; i < count - 1; i++)
1171 XVECEXP (vector, 0, i + 1) =
1172 gen_rtx_SET (SImode,
1173 gen_rtx_MEM (SImode,
1174 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1175 GEN_INT ((i + 1) * UNITS_PER_WORD))),
1176 gen_rtx_REG (SImode, high - i));
1177 return vector;
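/* A sketch of the PARALLEL built above for low == 6, high == 8: element 0
   lowers the stack pointer by 3 * UNITS_PER_WORD, and elements 1..3 store
   r8, r7 and r6 at sp - 4, sp - 8 and sp - 12 respectively, matching the
   effect of a single PUSHM r6-r8 instruction.  */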
1180 /* Mark INSN as being frame related. If it is a PARALLEL
1181 then mark each element as being frame related as well. */
1183 static void
1184 mark_frame_related (rtx insn)
1186 RTX_FRAME_RELATED_P (insn) = 1;
1187 insn = PATTERN (insn);
1189 if (GET_CODE (insn) == PARALLEL)
1191 unsigned int i;
1193 for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
1194 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
1198 void
1199 rx_expand_prologue (void)
1201 unsigned int stack_size;
1202 unsigned int frame_size;
1203 unsigned int mask;
1204 unsigned int low;
1205 unsigned int high;
1206 unsigned int reg;
1207 rtx insn;
1209 /* Naked functions use their own, programmer provided prologues. */
1210 if (is_naked_func (NULL_TREE))
1211 return;
1213 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1215 /* If we use any of the callee-saved registers, save them now. */
1216 if (mask)
1218 /* Push registers in reverse order. */
1219 for (reg = FIRST_PSEUDO_REGISTER; reg --;)
1220 if (mask & (1 << reg))
1222 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
1223 mark_frame_related (insn);
1226 else if (low)
1228 if (high == low)
1229 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1230 else
1231 insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
1232 * UNITS_PER_WORD),
1233 gen_rx_store_vector (low, high)));
1234 mark_frame_related (insn);
1237 if (MUST_SAVE_ACC_REGISTER)
1239 unsigned int acc_high, acc_low;
1241 /* Interrupt handlers have to preserve the accumulator
1242 register if so requested by the user. Use the first
1243 two pushed registers as intermediaries. */
1244 if (mask)
1246 acc_low = acc_high = 0;
1248 for (reg = 1; reg < FIRST_PSEUDO_REGISTER; reg ++)
1249 if (mask & (1 << reg))
1251 if (acc_low == 0)
1252 acc_low = reg;
1253 else
1255 acc_high = reg;
1256 break;
1260 /* We have assumed that there are at least two registers pushed... */
1261 gcc_assert (acc_high != 0);
1263 /* Note - the bottom 16 bits of the accumulator are inaccessible.
1264 We just assume that they are zero. */
1265 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1266 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1267 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
1268 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
1270 else
1272 acc_low = low;
1273 acc_high = low + 1;
1275 /* We have assumed that there are at least two registers pushed... */
1276 gcc_assert (acc_high <= high);
1278 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1279 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1280 emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
1281 gen_rx_store_vector (acc_low, acc_high)));
1284 frame_size += 2 * UNITS_PER_WORD;
1287 /* If needed, set up the frame pointer. */
1288 if (frame_pointer_needed)
1290 if (frame_size)
1291 insn = emit_insn (gen_addsi3 (frame_pointer_rtx, stack_pointer_rtx,
1292 GEN_INT (- (HOST_WIDE_INT) frame_size)));
1293 else
1294 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1296 RTX_FRAME_RELATED_P (insn) = 1;
1299 insn = NULL_RTX;
1301 /* Allocate space for the outgoing args.
1302 If the stack frame has not already been set up then handle this as well. */
1303 if (stack_size)
1305 if (frame_size)
1307 if (frame_pointer_needed)
1308 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1309 GEN_INT (- (HOST_WIDE_INT)
1310 stack_size)));
1311 else
1312 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1313 GEN_INT (- (HOST_WIDE_INT)
1314 (frame_size + stack_size))));
1316 else
1317 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1318 GEN_INT (- (HOST_WIDE_INT) stack_size)));
1320 else if (frame_size)
1322 if (! frame_pointer_needed)
1323 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1324 GEN_INT (- (HOST_WIDE_INT) frame_size)));
1325 else
1326 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1329 if (insn != NULL_RTX)
1330 RTX_FRAME_RELATED_P (insn) = 1;
1333 static void
1334 rx_output_function_prologue (FILE * file,
1335 HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
1337 if (is_fast_interrupt_func (NULL_TREE))
1338 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1340 if (is_interrupt_func (NULL_TREE))
1341 asm_fprintf (file, "\t; Note: Interrupt Handler\n");
1343 if (is_naked_func (NULL_TREE))
1344 asm_fprintf (file, "\t; Note: Naked Function\n");
1346 if (cfun->static_chain_decl != NULL)
1347 asm_fprintf (file, "\t; Note: Nested function declared "
1348 "inside another function.\n");
1350 if (crtl->calls_eh_return)
1351 asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1354 /* Generate a POPM or RTSD instruction that matches the given operands. */
1356 void
1357 rx_emit_stack_popm (rtx * operands, bool is_popm)
1359 HOST_WIDE_INT stack_adjust;
1360 HOST_WIDE_INT last_reg;
1361 rtx first_push;
1363 gcc_assert (CONST_INT_P (operands[0]));
1364 stack_adjust = INTVAL (operands[0]);
1366 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1367 last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1369 first_push = XVECEXP (operands[1], 0, 1);
1370 gcc_assert (SET_P (first_push));
1371 first_push = SET_DEST (first_push);
1372 gcc_assert (REG_P (first_push));
1374 if (is_popm)
1375 asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1376 reg_names [REGNO (first_push)],
1377 reg_names [REGNO (first_push) + last_reg]);
1378 else
1379 asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1380 (int) stack_adjust,
1381 reg_names [REGNO (first_push)],
1382 reg_names [REGNO (first_push) + last_reg]);
1385 /* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate. */
1387 static rtx
1388 gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1390 unsigned int i;
1391 unsigned int bias = 3;
1392 unsigned int count = (high - low) + bias;
1393 rtx vector;
1395 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1397 XVECEXP (vector, 0, 0) =
1398 gen_rtx_SET (SImode, stack_pointer_rtx,
1399 plus_constant (stack_pointer_rtx, adjust));
1401 for (i = 0; i < count - 2; i++)
1402 XVECEXP (vector, 0, i + 1) =
1403 gen_rtx_SET (SImode,
1404 gen_rtx_REG (SImode, low + i),
1405 gen_rtx_MEM (SImode,
1406 i == 0 ? stack_pointer_rtx
1407 : plus_constant (stack_pointer_rtx,
1408 i * UNITS_PER_WORD)));
1410 XVECEXP (vector, 0, count - 1) = gen_rtx_RETURN (VOIDmode);
1412 return vector;
1415 /* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */
1417 static rtx
1418 gen_rx_popm_vector (unsigned int low, unsigned int high)
1420 unsigned int i;
1421 unsigned int count = (high - low) + 2;
1422 rtx vector;
1424 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1426 XVECEXP (vector, 0, 0) =
1427 gen_rtx_SET (SImode, stack_pointer_rtx,
1428 plus_constant (stack_pointer_rtx,
1429 (count - 1) * UNITS_PER_WORD));
1431 for (i = 0; i < count - 1; i++)
1432 XVECEXP (vector, 0, i + 1) =
1433 gen_rtx_SET (SImode,
1434 gen_rtx_REG (SImode, low + i),
1435 gen_rtx_MEM (SImode,
1436 i == 0 ? stack_pointer_rtx
1437 : plus_constant (stack_pointer_rtx,
1438 i * UNITS_PER_WORD)));
1440 return vector;
1443 void
1444 rx_expand_epilogue (bool is_sibcall)
1446 unsigned int low;
1447 unsigned int high;
1448 unsigned int frame_size;
1449 unsigned int stack_size;
1450 unsigned int register_mask;
1451 unsigned int regs_size;
1452 unsigned int reg;
1453 unsigned HOST_WIDE_INT total_size;
 1455   /* FIXME: We do not support indirect sibcalls at the moment because we
1456 cannot guarantee that the register holding the function address is a
1457 call-used register. If it is a call-saved register then the stack
1458 pop instructions generated in the epilogue will corrupt the address
1459 before it is used.
1461 Creating a new call-used-only register class works but then the
1462 reload pass gets stuck because it cannot always find a call-used
1463 register for spilling sibcalls.
1465 The other possible solution is for this pass to scan forward for the
1466 sibcall instruction (if it has been generated) and work out if it
1467 is an indirect sibcall using a call-saved register. If it is then
 1468      the address can be copied into a call-used register in this epilogue
1469 code and the sibcall instruction modified to use that register. */
1471 if (is_naked_func (NULL_TREE))
1473 gcc_assert (! is_sibcall);
1475 /* Naked functions use their own, programmer provided epilogues.
1476 But, in order to keep gcc happy we have to generate some kind of
1477 epilogue RTL. */
1478 emit_jump_insn (gen_naked_return ());
1479 return;
1482 rx_get_stack_layout (& low, & high, & register_mask,
1483 & frame_size, & stack_size);
1485 total_size = frame_size + stack_size;
1486 regs_size = ((high - low) + 1) * UNITS_PER_WORD;
1488 /* See if we are unable to use the special stack frame deconstruct and
1489 return instructions. In most cases we can use them, but the exceptions
1490 are:
1492 - Sibling calling functions deconstruct the frame but do not return to
1493 their caller. Instead they branch to their sibling and allow their
1494 return instruction to return to this function's parent.
1496 - Fast and normal interrupt handling functions have to use special
1497 return instructions.
1499 - Functions where we have pushed a fragmented set of registers into the
1500 call-save area must have the same set of registers popped. */
1501 if (is_sibcall
1502 || is_fast_interrupt_func (NULL_TREE)
1503 || is_interrupt_func (NULL_TREE)
1504 || register_mask)
1506 /* Cannot use the special instructions - deconstruct by hand. */
1507 if (total_size)
1508 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1509 GEN_INT (total_size)));
1511 if (MUST_SAVE_ACC_REGISTER)
1513 unsigned int acc_low, acc_high;
1515 /* Reverse the saving of the accumulator register onto the stack.
1516 Note we must adjust the saved "low" accumulator value as it
1517 is really the middle 32-bits of the accumulator. */
1518 if (register_mask)
1520 acc_low = acc_high = 0;
1521 for (reg = 1; reg < FIRST_PSEUDO_REGISTER; reg ++)
1522 if (register_mask & (1 << reg))
1524 if (acc_low == 0)
1525 acc_low = reg;
1526 else
1528 acc_high = reg;
1529 break;
1532 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
1533 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
1535 else
1537 acc_low = low;
1538 acc_high = low + 1;
1539 emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
1540 gen_rx_popm_vector (acc_low, acc_high)));
1543 emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
1544 gen_rtx_REG (SImode, acc_low),
1545 GEN_INT (16)));
1546 emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
1547 emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
1550 if (register_mask)
1552 for (reg = 0; reg < FIRST_PSEUDO_REGISTER; reg ++)
1553 if (register_mask & (1 << reg))
1554 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)));
1556 else if (low)
1558 if (high == low)
1559 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
1560 else
1561 emit_insn (gen_stack_popm (GEN_INT (regs_size),
1562 gen_rx_popm_vector (low, high)));
1565 if (is_fast_interrupt_func (NULL_TREE))
1567 gcc_assert (! is_sibcall);
1568 emit_jump_insn (gen_fast_interrupt_return ());
1570 else if (is_interrupt_func (NULL_TREE))
1572 gcc_assert (! is_sibcall);
1573 emit_jump_insn (gen_exception_return ());
1575 else if (! is_sibcall)
1576 emit_jump_insn (gen_simple_return ());
1578 return;
1581 /* If we allocated space on the stack, free it now. */
1582 if (total_size)
1584 unsigned HOST_WIDE_INT rtsd_size;
1586 /* See if we can use the RTSD instruction. */
1587 rtsd_size = total_size + regs_size;
1588 if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
1590 if (low)
1591 emit_jump_insn (gen_pop_and_return
1592 (GEN_INT (rtsd_size),
1593 gen_rx_rtsd_vector (rtsd_size, low, high)));
1594 else
1595 emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
1597 return;
1600 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1601 GEN_INT (total_size)));
1604 if (low)
1605 emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
1606 gen_rx_rtsd_vector (regs_size,
1607 low, high)));
1608 else
1609 emit_jump_insn (gen_simple_return ());
 1613 /* Compute the offset (in bytes) between FROM (arg pointer
1614 or frame pointer) and TO (frame pointer or stack pointer).
1615 See ASCII art comment at the start of rx_expand_prologue
1616 for more information. */
 1618 int
 1619 rx_initial_elimination_offset (int from, int to)
1621 unsigned int low;
1622 unsigned int high;
1623 unsigned int frame_size;
1624 unsigned int stack_size;
1625 unsigned int mask;
1627 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1629 if (from == ARG_POINTER_REGNUM)
1631 /* Extend the computed size of the stack frame to
1632 include the registers pushed in the prologue. */
1633 if (low)
1634 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
1635 else
1636 frame_size += bit_count (mask) * UNITS_PER_WORD;
1638 /* Remember to include the return address. */
1639 frame_size += 1 * UNITS_PER_WORD;
1641 if (to == FRAME_POINTER_REGNUM)
1642 return frame_size;
1644 gcc_assert (to == STACK_POINTER_REGNUM);
1645 return frame_size + stack_size;
1648 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
1649 return stack_size;
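/* Worked example (all sizes invented): a function that pushes r6-r8, has
   20 bytes of locals, no arguments passed on the stack and 8 bytes of
   outgoing arguments gives frame_size == 20 + 3 * UNITS_PER_WORD +
   UNITS_PER_WORD for the return address == 36.  The arg pointer is
   therefore 36 bytes above the frame pointer and 44 bytes above the stack
   pointer, while the frame pointer sits 8 bytes above the stack pointer.  */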
1652 /* Update the status of the condition
1653 codes (cc0) based on the given INSN. */
1655 void
1656 rx_notice_update_cc (rtx body, rtx insn)
1658 switch (get_attr_cc (insn))
1660 case CC_NONE:
1661 /* Insn does not affect cc0 at all. */
1662 break;
1663 case CC_CLOBBER:
1664 /* Insn doesn't leave cc0 in a usable state. */
1665 CC_STATUS_INIT;
1666 break;
1667 case CC_SET_ZSOC:
1668 /* The insn sets all the condition code bits. */
1669 CC_STATUS_INIT;
1670 cc_status.value1 = SET_SRC (body);
1671 break;
1672 case CC_SET_ZSO:
1673 /* Insn sets the Z,S and O flags, but not the C flag. */
1674 CC_STATUS_INIT;
1675 cc_status.flags |= CC_NO_CARRY;
1676 /* Do not set the value1 field in this case. The final_scan_insn()
1677 function naively believes that if cc_status.value1 is set then
1678 it can eliminate *any* comparison against that value, even if
1679 the type of comparison cannot be satisfied by the range of flag
1680 bits being set here. See gcc.c-torture/execute/20041210-1.c
1681 for an example of this in action. */
1682 break;
1683 case CC_SET_ZS:
1684 /* Insn sets the Z and S flags, but not the O or C flags. */
1685 CC_STATUS_INIT;
1686 cc_status.flags |= (CC_NO_CARRY | CC_NO_OVERFLOW);
1687 /* See comment above regarding cc_status.value1. */
1688 break;
1689 default:
1690 gcc_unreachable ();
1694 /* Decide if a variable should go into one of the small data sections. */
1696 static bool
1697 rx_in_small_data (const_tree decl)
1699 int size;
1700 const_tree section;
1702 if (rx_small_data_limit == 0)
1703 return false;
1705 if (TREE_CODE (decl) != VAR_DECL)
1706 return false;
1708 /* We do not put read-only variables into a small data area because
1709 they would be placed with the other read-only sections, far away
1710 from the read-write data sections, and we only have one small
1711 data area pointer.
1712 Similarly commons are placed in the .bss section which might be
1713 far away (and out of alignment with respect to) the .data section. */
1714 if (TREE_READONLY (decl) || DECL_COMMON (decl))
1715 return false;
1717 section = DECL_SECTION_NAME (decl);
1718 if (section)
1720 const char * const name = TREE_STRING_POINTER (section);
1722 return (strcmp (name, "D_2") == 0) || (strcmp (name, "B_2") == 0);
1725 size = int_size_in_bytes (TREE_TYPE (decl));
1727 return (size > 0) && (size <= rx_small_data_limit);
1730 /* Return a section for X.
1731 The only special thing we do here is to honor small data. */
1733 static section *
1734 rx_select_rtx_section (enum machine_mode mode,
1735 rtx x,
1736 unsigned HOST_WIDE_INT align)
1738 if (rx_small_data_limit > 0
1739 && GET_MODE_SIZE (mode) <= rx_small_data_limit
1740 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
1741 return sdata_section;
1743 return default_elf_select_rtx_section (mode, x, align);
1746 static section *
1747 rx_select_section (tree decl,
1748 int reloc,
1749 unsigned HOST_WIDE_INT align)
1751 if (rx_small_data_limit > 0)
1753 switch (categorize_decl_for_section (decl, reloc))
1755 case SECCAT_SDATA: return sdata_section;
1756 case SECCAT_SBSS: return sbss_section;
1757 case SECCAT_SRODATA:
1758 /* Fall through. We do not put small, read only
1759 data into the C_2 section because we are not
1760 using the C_2 section. We do not use the C_2
1761 section because it is located with the other
1762 read-only data sections, far away from the read-write
1763 data sections and we only have one small data
1764 pointer (r13). */
1765 default:
1766 break;
1770 /* If we are supporting the Renesas assembler
1771 we cannot use mergeable sections. */
1772 if (TARGET_AS100_SYNTAX)
1773 switch (categorize_decl_for_section (decl, reloc))
1775 case SECCAT_RODATA_MERGE_CONST:
1776 case SECCAT_RODATA_MERGE_STR_INIT:
1777 case SECCAT_RODATA_MERGE_STR:
1778 return readonly_data_section;
1780 default:
1781 break;
1784 return default_elf_select_section (decl, reloc, align);
1787 enum rx_builtin
1789 RX_BUILTIN_BRK,
1790 RX_BUILTIN_CLRPSW,
1791 RX_BUILTIN_INT,
1792 RX_BUILTIN_MACHI,
1793 RX_BUILTIN_MACLO,
1794 RX_BUILTIN_MULHI,
1795 RX_BUILTIN_MULLO,
1796 RX_BUILTIN_MVFACHI,
1797 RX_BUILTIN_MVFACMI,
1798 RX_BUILTIN_MVFC,
1799 RX_BUILTIN_MVTACHI,
1800 RX_BUILTIN_MVTACLO,
1801 RX_BUILTIN_MVTC,
1802 RX_BUILTIN_MVTIPL,
1803 RX_BUILTIN_RACW,
1804 RX_BUILTIN_REVW,
1805 RX_BUILTIN_RMPA,
1806 RX_BUILTIN_ROUND,
1807 RX_BUILTIN_SAT,
1808 RX_BUILTIN_SETPSW,
1809 RX_BUILTIN_WAIT,
1810 RX_BUILTIN_max
1813 static void
1814 rx_init_builtins (void)
1816 #define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
1817 add_builtin_function ("__builtin_rx_" LC_NAME, \
1818 build_function_type_list (RET_TYPE##_type_node, \
1819 ARG_TYPE##_type_node, \
1820 NULL_TREE), \
1821 RX_BUILTIN_##UC_NAME, \
1822 BUILT_IN_MD, NULL, NULL_TREE)
1824 #define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
1825 add_builtin_function ("__builtin_rx_" LC_NAME, \
1826 build_function_type_list (RET_TYPE##_type_node, \
1827 ARG_TYPE1##_type_node,\
1828 ARG_TYPE2##_type_node,\
1829 NULL_TREE), \
1830 RX_BUILTIN_##UC_NAME, \
1831 BUILT_IN_MD, NULL, NULL_TREE)
1833 #define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
1834 add_builtin_function ("__builtin_rx_" LC_NAME, \
1835 build_function_type_list (RET_TYPE##_type_node, \
1836 ARG_TYPE1##_type_node,\
1837 ARG_TYPE2##_type_node,\
1838 ARG_TYPE3##_type_node,\
1839 NULL_TREE), \
1840 RX_BUILTIN_##UC_NAME, \
1841 BUILT_IN_MD, NULL, NULL_TREE)
1843 ADD_RX_BUILTIN1 (BRK, "brk", void, void);
1844 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
1845 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
1846 ADD_RX_BUILTIN1 (INT, "int", void, integer);
1847 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
1848 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
1849 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
1850 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
1851 ADD_RX_BUILTIN1 (MVFACHI, "mvfachi", intSI, void);
1852 ADD_RX_BUILTIN1 (MVFACMI, "mvfacmi", intSI, void);
1853 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
1854 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
1855 ADD_RX_BUILTIN1 (RMPA, "rmpa", void, void);
1856 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
1857 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
1858 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
1859 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
1860 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
1861 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
1862 ADD_RX_BUILTIN1 (SAT, "sat", intSI, intSI);
1863 ADD_RX_BUILTIN1 (WAIT, "wait", void, void);
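/* A hedged example of the macro expansion above:
   ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI) calls
   add_builtin_function to register __builtin_rx_machi, taking two SImode
   integers and returning void, with function code RX_BUILTIN_MACHI for
   rx_expand_builtin below to dispatch on.  */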
1866 static rtx
1867 rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
1869 if (reg && ! REG_P (arg))
1870 arg = force_reg (SImode, arg);
1872 emit_insn (gen_func (arg));
1874 return NULL_RTX;
1877 static rtx
1878 rx_expand_builtin_mvtc (tree exp)
1880 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
1881 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
1883 if (! CONST_INT_P (arg1))
1884 return NULL_RTX;
1886 if (! REG_P (arg2))
1887 arg2 = force_reg (SImode, arg2);
1889 emit_insn (gen_mvtc (arg1, arg2));
1891 return NULL_RTX;
1894 static rtx
1895 rx_expand_builtin_mvfc (tree t_arg, rtx target)
1897 rtx arg = expand_normal (t_arg);
1899 if (! CONST_INT_P (arg))
1900 return NULL_RTX;
1902 if (target == NULL_RTX)
1903 return NULL_RTX;
1905 if (! REG_P (target))
1906 target = force_reg (SImode, target);
1908 emit_insn (gen_mvfc (target, arg));
1910 return target;
1913 static rtx
1914 rx_expand_builtin_mvtipl (rtx arg)
1916 /* The RX610 does not support the MVTIPL instruction. */
1917 if (rx_cpu_type == RX610)
1918 return NULL_RTX;
 1920   if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
1921 return NULL_RTX;
1923 emit_insn (gen_mvtipl (arg));
1925 return NULL_RTX;
1928 static rtx
1929 rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
1931 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
1932 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
1934 if (! REG_P (arg1))
1935 arg1 = force_reg (SImode, arg1);
1937 if (! REG_P (arg2))
1938 arg2 = force_reg (SImode, arg2);
1940 emit_insn (gen_func (arg1, arg2));
1942 return NULL_RTX;
1945 static rtx
1946 rx_expand_int_builtin_1_arg (rtx arg,
1947 rtx target,
1948 rtx (* gen_func)(rtx, rtx),
1949 bool mem_ok)
1951 if (! REG_P (arg))
1952 if (!mem_ok || ! MEM_P (arg))
1953 arg = force_reg (SImode, arg);
1955 if (target == NULL_RTX || ! REG_P (target))
1956 target = gen_reg_rtx (SImode);
1958 emit_insn (gen_func (target, arg));
1960 return target;
1963 static rtx
1964 rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
1966 if (target == NULL_RTX || ! REG_P (target))
1967 target = gen_reg_rtx (SImode);
1969 emit_insn (gen_func (target));
1971 return target;
1974 static rtx
1975 rx_expand_builtin_round (rtx arg, rtx target)
1977 if ((! REG_P (arg) && ! MEM_P (arg))
1978 || GET_MODE (arg) != SFmode)
1979 arg = force_reg (SFmode, arg);
1981 if (target == NULL_RTX || ! REG_P (target))
1982 target = gen_reg_rtx (SImode);
1984 emit_insn (gen_lrintsf2 (target, arg));
1986 return target;
1989 static rtx
1990 rx_expand_builtin (tree exp,
1991 rtx target,
1992 rtx subtarget ATTRIBUTE_UNUSED,
1993 enum machine_mode mode ATTRIBUTE_UNUSED,
1994 int ignore ATTRIBUTE_UNUSED)
1996 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
1997 tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
1998 rtx op = arg ? expand_normal (arg) : NULL_RTX;
1999 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2001 switch (fcode)
2003 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
2004 case RX_BUILTIN_CLRPSW: return rx_expand_void_builtin_1_arg
2005 (op, gen_clrpsw, false);
2006 case RX_BUILTIN_SETPSW: return rx_expand_void_builtin_1_arg
2007 (op, gen_setpsw, false);
2008 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
2009 (op, gen_int, false);
2010 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
2011 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
2012 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
2013 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
2014 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2015 (target, gen_mvfachi);
2016 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2017 (target, gen_mvfacmi);
2018 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2019 (op, gen_mvtachi, true);
2020 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2021 (op, gen_mvtaclo, true);
2022 case RX_BUILTIN_RMPA: emit_insn (gen_rmpa ()); return NULL_RTX;
2023 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2024 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
2025 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
2026 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2027 (op, gen_racw, false);
2028 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2029 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2030 (op, target, gen_revw, false);
2031 case RX_BUILTIN_SAT: return rx_expand_int_builtin_1_arg
2032 (op, target, gen_sat, false);
2033 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
2035 default:
2036 internal_error ("bad builtin code");
2037 break;
2040 return NULL_RTX;
2043 /* Place an element into a constructor or destructor section.
2044 Like default_ctor_section_asm_out_constructor in varasm.c
2045 except that it uses .init_array (or .fini_array) and it
2046 handles constructor priorities. */
2048 static void
2049 rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2051 section * s;
2053 if (priority != DEFAULT_INIT_PRIORITY)
2055 char buf[18];
2057 sprintf (buf, "%s.%.5u",
2058 is_ctor ? ".init_array" : ".fini_array",
2059 priority);
2060 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2062 else if (is_ctor)
2063 s = ctors_section;
2064 else
2065 s = dtors_section;
2067 switch_to_section (s);
2068 assemble_align (POINTER_SIZE);
2069 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2072 static void
2073 rx_elf_asm_constructor (rtx symbol, int priority)
2075 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2078 static void
2079 rx_elf_asm_destructor (rtx symbol, int priority)
2081 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2084 /* Check "fast_interrupt", "interrupt" and "naked" attributes. */
2086 static tree
2087 rx_handle_func_attribute (tree * node,
2088 tree name,
2089 tree args,
2090 int flags ATTRIBUTE_UNUSED,
2091 bool * no_add_attrs)
2093 gcc_assert (DECL_P (* node));
2094 gcc_assert (args == NULL_TREE);
2096 if (TREE_CODE (* node) != FUNCTION_DECL)
2098 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2099 name);
2100 * no_add_attrs = true;
2103 /* FIXME: We ought to check for conflicting attributes. */
2105 /* FIXME: We ought to check that the interrupt and exception
2106 handler attributes have been applied to void functions. */
2107 return NULL_TREE;
2110 /* Table of RX specific attributes. */
2111 const struct attribute_spec rx_attribute_table[] =
2113 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler. */
2114 { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute },
2115 { "interrupt", 0, 0, true, false, false, rx_handle_func_attribute },
2116 { "naked", 0, 0, true, false, false, rx_handle_func_attribute },
2117 { NULL, 0, 0, false, false, false, NULL }
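/* Example uses of the attributes declared above (illustrative only):

       void rx_isr (void) __attribute__((interrupt));
       void rx_fast_isr (void) __attribute__((fast_interrupt));
       void stub (void) __attribute__((naked));

   All three take no arguments (min_len == max_len == 0) and require a decl;
   rx_handle_func_attribute additionally warns and discards the attribute if
   that decl is not a function.  */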
2120 /* Extra processing for target specific command line options. */
2122 static bool
2123 rx_handle_option (size_t code, const char * arg ATTRIBUTE_UNUSED, int value)
2125 switch (code)
2127 case OPT_mint_register_:
2128 switch (value)
2130 case 4:
2131 fixed_regs[10] = call_used_regs [10] = 1;
2132 /* Fall through. */
2133 case 3:
2134 fixed_regs[11] = call_used_regs [11] = 1;
2135 /* Fall through. */
2136 case 2:
2137 fixed_regs[12] = call_used_regs [12] = 1;
2138 /* Fall through. */
2139 case 1:
2140 fixed_regs[13] = call_used_regs [13] = 1;
2141 /* Fall through. */
2142 case 0:
2143 return true;
2144 default:
2145 return false;
2147 break;
2149 case OPT_mmax_constant_size_:
2150 /* Make sure that the -mmax-constant-size option is in range. */
2151 return value >= 0 && value <= 4;
2153 case OPT_mcpu_:
2154 case OPT_patch_:
2155 if (strcasecmp (arg, "RX610") == 0)
2156 rx_cpu_type = RX610;
2157 else if (strcasecmp (arg, "RX200") == 0)
2159 target_flags |= MASK_NO_USE_FPU;
2160 rx_cpu_type = RX200;
2162 else if (strcasecmp (arg, "RX600") != 0)
2163 warning (0, "unrecognized argument '%s' to -mcpu= option", arg);
2164 break;
2166 case OPT_fpu:
2167 if (rx_cpu_type == RX200)
2168 error ("the RX200 cpu does not have FPU hardware");
2169 break;
2171 default:
2172 break;
2175 return true;
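/* Illustrative effect of the fall-through chain above, a sketch rather than
   documentation:

       -mint-register=1   r13 becomes fixed and call-used
       -mint-register=2   r12 and r13 become fixed and call-used
       -mint-register=3   r11, r12 and r13 become fixed and call-used
       -mint-register=4   r10, r11, r12 and r13 become fixed and call-used
       -mint-register=0   no registers are reserved

   i.e. the option reserves up to four registers (presumably for use by fast
   interrupt handlers), and any other value is rejected.  Similarly,
   -mmax-constant-size only accepts values 0 through 4, and -mcpu=rx200
   (or -patch=rx200) also sets MASK_NO_USE_FPU because that core has no
   FPU hardware.  */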
2178 void
2179 rx_set_optimization_options (void)
2181 static bool first_time = TRUE;
2182 static bool saved_allow_rx_fpu = TRUE;
2184 if (first_time)
2186 /* If this is the first time through and the user has not disabled
2187 the use of RX FPU hardware then enable unsafe math optimizations,
2188 since the FPU instructions themselves are unsafe. */
2189 if (TARGET_USE_FPU)
2190 set_fast_math_flags (true);
2192 /* FIXME: For some unknown reason LTO compression is not working,
2193 at least on my local system. So set the default compression
2194 level to none, for now. */
2195 if (flag_lto_compression_level == -1)
2196 flag_lto_compression_level = 0;
2198 saved_allow_rx_fpu = ALLOW_RX_FPU_INSNS;
2199 first_time = FALSE;
2201 else
2203 /* Alert the user if they are changing the optimization options
2204 to use IEEE compliant floating point arithmetic with RX FPU insns. */
2205 if (TARGET_USE_FPU
2206 && ! fast_math_flags_set_p ())
2207 warning (0, "RX FPU instructions are not IEEE compliant");
2209 if (saved_allow_rx_fpu != ALLOW_RX_FPU_INSNS)
2210 error ("changing the FPU insns/math optimizations pairing is not supported");
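/* Illustrative sketch, not part of the original sources: assuming an rx-elf
   toolchain, a build such as

       rx-elf-gcc -O2 -mcpu=rx610 test.c

   leaves TARGET_USE_FPU set, so the first call through here turns on the
   same flags as -ffast-math (the RX FPU instructions do not implement full
   IEEE semantics).  Building with -nofpu, or with -mcpu=rx200 which implies
   it, keeps the IEEE-conformant software float routines and leaves those
   flags alone.  If this function is invoked again later with the pairing
   changed, the else branch above issues the warning or error shown.  */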
2215 static bool
2216 rx_allocate_stack_slots_for_args (void)
2218 /* Naked functions should not allocate stack slots for arguments. */
2219 return ! is_naked_func (NULL_TREE);
2222 static bool
2223 rx_func_attr_inlinable (const_tree decl)
2225 return ! is_fast_interrupt_func (decl)
2226 && ! is_interrupt_func (decl)
2227 && ! is_naked_func (decl);
2230 /* Return true if it is OK to make a tail call to DECL, a FUNCTION_DECL
2231 or NULL if this is an indirect call, using EXP. */
2233 static bool
2234 rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2236 /* Do not allow indirect tailcalls. The
2237 sibcall patterns do not support them. */
2238 if (decl == NULL)
2239 return false;
2241 /* Never tailcall from inside interrupt handlers or naked functions. */
2242 if (is_fast_interrupt_func (NULL_TREE)
2243 || is_interrupt_func (NULL_TREE)
2244 || is_naked_func (NULL_TREE))
2245 return false;
2247 return true;
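/* Illustrative consequences of the checks above (a sketch, not from the
   original sources):

       extern int helper (int);

       int tail_direct (int x) { return helper (x); }      sibcall allowed

       int tail_indirect (int (*fn) (int), int x)
       { return fn (x); }                                   decl is NULL, so
                                                            no sibcall

   and no call made inside an "interrupt", "fast_interrupt" or "naked"
   function is ever converted into a sibcall, because those functions cannot
   use the ordinary return sequence.  */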
2250 static void
2251 rx_file_start (void)
2253 if (! TARGET_AS100_SYNTAX)
2254 default_file_start ();
2257 static bool
2258 rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2260 return TRUE;
2263 /* Try to generate code for the "insv" pattern which inserts bits
2264 into a word.
2265 operands[0] => Location to be altered.
2266 operands[1] => Number of bits to change.
2267 operands[2] => Starting bit.
2268 operands[3] => Value to insert.
2269 Returns TRUE if successful, FALSE otherwise. */
2271 bool
2272 rx_expand_insv (rtx * operands)
2274 if (INTVAL (operands[1]) != 1
2275 || ! CONST_INT_P (operands[3]))
2276 return false;
2278 if (MEM_P (operands[0])
2279 && INTVAL (operands[2]) > 7)
2280 return false;
2282 switch (INTVAL (operands[3]))
2284 case 0:
2285 if (MEM_P (operands[0]))
2286 emit_insn (gen_bitclr_in_memory (operands[0], operands[0],
2287 operands[2]));
2288 else
2289 emit_insn (gen_bitclr (operands[0], operands[0], operands[2]));
2290 break;
2291 case 1:
2292 case -1:
2293 if (MEM_P (operands[0]))
2294 emit_insn (gen_bitset_in_memory (operands[0], operands[0],
2295 operands[2]));
2296 else
2297 emit_insn (gen_bitset (operands[0], operands[0], operands[2]));
2298 break;
2299 default:
2300 return false;
2302 return true;
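/* Illustrative trigger for this expander (a sketch, not from the original
   sources): single-bit stores into a byte-addressable bit-field such as

       struct flags { volatile unsigned char ready : 1, busy : 1, err : 1; };
       struct flags *f;

       void mark_ready (void) { f->ready = 1; }
       void clear_err  (void) { f->err   = 0; }

   typically reach rx_expand_insv with operands[1] == 1, a small constant bit
   position in operands[2] and a constant 0, 1 or -1 in operands[3], so the
   switch above emits the RX BSET/BCLR instructions (the *_in_memory variants
   when operands[0] is a MEM).  Wider fields, non-constant values, or memory
   bit positions above 7 return false and fall back to the generic code.  */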
2305 /* Returns true if X is a legitimate constant for an immediate
2306 operand on the RX. X is already known to satisfy CONSTANT_P. */
2308 bool
2309 rx_is_legitimate_constant (rtx x)
2311 HOST_WIDE_INT val;
2313 switch (GET_CODE (x))
2315 case CONST:
2316 x = XEXP (x, 0);
2318 if (GET_CODE (x) == PLUS)
2320 if (! CONST_INT_P (XEXP (x, 1)))
2321 return false;
2323 /* GCC would not pass us CONST_INT + CONST_INT so we
2324 know that we have {SYMBOL|LABEL} + CONST_INT. */
2325 x = XEXP (x, 0);
2326 gcc_assert (! CONST_INT_P (x));
2329 switch (GET_CODE (x))
2331 case LABEL_REF:
2332 case SYMBOL_REF:
2333 return true;
2335 /* One day we may have to handle UNSPEC constants here. */
2336 default:
2337 /* FIXME: Can this ever happen ? */
2338 abort ();
2339 return false;
2341 break;
2343 case LABEL_REF:
2344 case SYMBOL_REF:
2345 return true;
2346 case CONST_DOUBLE:
2347 return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
2348 case CONST_VECTOR:
2349 return false;
2350 default:
2351 gcc_assert (CONST_INT_P (x));
2352 break;
2355 if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
2356 /* If there is no constraint on the size of constants
2357 used as operands, then any value is legitimate. */
2358 return true;
2360 val = INTVAL (x);
2362 /* rx_max_constant_size specifies the maximum number
2363 of bytes that can be used to hold a signed value. */
2364 return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
2365 ( 1 << (rx_max_constant_size * 8)));
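/* Worked example of the final range check (illustrative): with
   -mmax-constant-size=2 the test above is, as written,

       IN_RANGE (val, -(1 << 16), 1 << 16)     i.e.  -65536 <= val <= 65536

   so a constant such as 70000 is not a legitimate immediate and has to be
   loaded some other way, whereas symbols, labels and (when the limit is 0
   or 4) any CONST_INT are accepted.  */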
2368 static int
2369 rx_address_cost (rtx addr, bool speed)
2371 rtx a, b;
2373 if (GET_CODE (addr) != PLUS)
2374 return COSTS_N_INSNS (1);
2376 a = XEXP (addr, 0);
2377 b = XEXP (addr, 1);
2379 if (REG_P (a) && REG_P (b))
2380 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2381 return COSTS_N_INSNS (4);
2383 if (speed)
2384 /* [REG+OFF] is just as fast as [REG]. */
2385 return COSTS_N_INSNS (1);
2387 if (CONST_INT_P (b)
2388 && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2389 /* Try to discourage REG + <large OFF> when optimizing for size. */
2390 return COSTS_N_INSNS (2);
2392 return COSTS_N_INSNS (1);
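/* Illustrative costs returned above (a sketch):

       [r1]          COSTS_N_INSNS (1)
       [r1 + 8]      COSTS_N_INSNS (1)
       [r1 + 1000]   COSTS_N_INSNS (1) for speed, COSTS_N_INSNS (2) for size
       [r1 + r2]     COSTS_N_INSNS (4) in either case

   so register+register addressing is discouraged because it keeps two
   registers live, and large displacements are only penalized under -Os,
   where they cost extra instruction bytes.  */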
2395 static bool
2396 rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2398 /* We can always eliminate to the frame pointer.
2399 We can eliminate to the stack pointer unless a frame
2400 pointer is needed. */
2402 return to == FRAME_POINTER_REGNUM
2403 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
2407 static void
2408 rx_trampoline_template (FILE * file)
2410 /* Output assembler code for a block containing the constant
2411 part of a trampoline, leaving space for the variable parts.
2413 On the RX, (where r8 is the static chain regnum) the trampoline
2414 looks like:
2416 mov #<static chain value>, r8
2417 mov #<function's address>, r9
2418 jmp r9
2420 In big-endian-data-mode, however, instructions are read into the CPU
2421 4 bytes at a time. These bytes are then swapped around before being
2422 passed to the decoder. So we must partition our trampoline into
2423 4-byte packets and swap these packets around so that the instruction
2424 reader will reverse the process. But, in order to avoid splitting
2425 the 32-bit constants across these packet boundaries (which would make
2426 inserting them into the constructed trampoline very difficult), we have
2427 to pad the instruction sequence with NOP insns, i.e.:
2429 nop
2430 nop
2431 mov.l #<...>, r8
2432 nop
2433 nop
2434 mov.l #<...>, r9
2435 jmp r9
2436 nop
2437 nop */
2439 if (! TARGET_BIG_ENDIAN_DATA)
2441 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
2442 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
2443 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
2445 else
2447 char r8 = '0' + STATIC_CHAIN_REGNUM;
2448 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
2450 if (TARGET_AS100_SYNTAX)
2452 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
2453 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2454 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
2455 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2456 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
2458 else
2460 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
2461 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2462 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
2463 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2464 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
2469 static void
2470 rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
2472 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2474 emit_block_move (tramp, assemble_trampoline_template (),
2475 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2477 if (TARGET_BIG_ENDIAN_DATA)
2479 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
2480 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
2482 else
2484 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
2485 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
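/* Worked example of the resulting trampoline (illustrative): in
   little-endian data mode the template above assembles to roughly

       offsets  0- 1   mov.L opcode targeting r8 (the static chain register)
       offsets  2- 5   0xdeadbeef placeholder -> replaced with the chain value
       offsets  6- 7   mov.L opcode targeting r9
       offsets  8-11   0xdeadbeef placeholder -> replaced with the function
       offsets 12-13   jmp r9

   which is why the SImode stores above patch offsets 2 and 6 + 2 = 8.  In
   big-endian data mode the NOP padding keeps each 32-bit constant inside its
   own 4-byte packet, so the placeholders sit at offsets 4 and 12 instead.  */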
2489 #undef TARGET_FUNCTION_VALUE
2490 #define TARGET_FUNCTION_VALUE rx_function_value
2492 #undef TARGET_RETURN_IN_MSB
2493 #define TARGET_RETURN_IN_MSB rx_return_in_msb
2495 #undef TARGET_IN_SMALL_DATA_P
2496 #define TARGET_IN_SMALL_DATA_P rx_in_small_data
2498 #undef TARGET_RETURN_IN_MEMORY
2499 #define TARGET_RETURN_IN_MEMORY rx_return_in_memory
2501 #undef TARGET_HAVE_SRODATA_SECTION
2502 #define TARGET_HAVE_SRODATA_SECTION true
2504 #undef TARGET_ASM_SELECT_RTX_SECTION
2505 #define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section
2507 #undef TARGET_ASM_SELECT_SECTION
2508 #define TARGET_ASM_SELECT_SECTION rx_select_section
2510 #undef TARGET_INIT_BUILTINS
2511 #define TARGET_INIT_BUILTINS rx_init_builtins
2513 #undef TARGET_EXPAND_BUILTIN
2514 #define TARGET_EXPAND_BUILTIN rx_expand_builtin
2516 #undef TARGET_ASM_CONSTRUCTOR
2517 #define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
2519 #undef TARGET_ASM_DESTRUCTOR
2520 #define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor
2522 #undef TARGET_STRUCT_VALUE_RTX
2523 #define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx
2525 #undef TARGET_ATTRIBUTE_TABLE
2526 #define TARGET_ATTRIBUTE_TABLE rx_attribute_table
2528 #undef TARGET_ASM_FILE_START
2529 #define TARGET_ASM_FILE_START rx_file_start
2531 #undef TARGET_MS_BITFIELD_LAYOUT_P
2532 #define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout
2534 #undef TARGET_LEGITIMATE_ADDRESS_P
2535 #define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address
2537 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
2538 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args
2540 #undef TARGET_ASM_FUNCTION_PROLOGUE
2541 #define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue
2543 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
2544 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable
2546 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
2547 #define TARGET_FUNCTION_OK_FOR_SIBCALL rx_function_ok_for_sibcall
2549 #undef TARGET_SET_CURRENT_FUNCTION
2550 #define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
2552 #undef TARGET_HANDLE_OPTION
2553 #define TARGET_HANDLE_OPTION rx_handle_option
2555 #undef TARGET_ASM_INTEGER
2556 #define TARGET_ASM_INTEGER rx_assemble_integer
2558 #undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
2559 #define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
2561 #undef TARGET_MAX_ANCHOR_OFFSET
2562 #define TARGET_MAX_ANCHOR_OFFSET 32
2564 #undef TARGET_ADDRESS_COST
2565 #define TARGET_ADDRESS_COST rx_address_cost
2567 #undef TARGET_CAN_ELIMINATE
2568 #define TARGET_CAN_ELIMINATE rx_can_eliminate
2570 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
2571 #define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template
2573 #undef TARGET_TRAMPOLINE_INIT
2574 #define TARGET_TRAMPOLINE_INIT rx_trampoline_init
2576 struct gcc_target targetm = TARGET_INITIALIZER;
2578 /* #include "gt-rx.h" */