1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright 2001,2002,2003 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "insn-attr.h"
35 #include "insn-codes.h"
49 #include "target-def.h"
50 #include "langhooks.h"
52 /* Enumeration for all of the relational tests, so that we can build
53 arrays indexed by the test type, and not worry about the order
71 /* Cached operands, and operator to compare for use in set/branch on
75 /* what type of branch to use */
76 enum cmp_type branch_type
;
78 /* Array giving truth value on whether or not a given hard register
79 can support a given mode. */
80 char xtensa_hard_regno_mode_ok
[(int) MAX_MACHINE_MODE
][FIRST_PSEUDO_REGISTER
];
82 /* Current frame size calculated by compute_frame_size. */
83 unsigned xtensa_current_frame_size
;
85 /* Tables of ld/st opcode names for block moves */
86 const char *xtensa_ld_opcodes
[(int) MAX_MACHINE_MODE
];
87 const char *xtensa_st_opcodes
[(int) MAX_MACHINE_MODE
];
88 #define LARGEST_MOVE_RATIO 15
90 /* Define the structure for the machine field in struct function. */
91 struct machine_function
GTY(())
93 int accesses_prev_frame
;
94 bool incoming_a7_copied
;
97 /* Vector, indexed by hard register number, which contains 1 for a
98 register that is allowable in a candidate for leaf function
101 const char xtensa_leaf_regs
[FIRST_PSEUDO_REGISTER
] =
103 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
105 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
109 /* Map hard register number to register class */
110 const enum reg_class xtensa_regno_to_class
[FIRST_PSEUDO_REGISTER
] =
112 RL_REGS
, SP_REG
, RL_REGS
, RL_REGS
,
113 RL_REGS
, RL_REGS
, RL_REGS
, GR_REGS
,
114 RL_REGS
, RL_REGS
, RL_REGS
, RL_REGS
,
115 RL_REGS
, RL_REGS
, RL_REGS
, RL_REGS
,
116 AR_REGS
, AR_REGS
, BR_REGS
,
117 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
118 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
119 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
120 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
124 /* Map register constraint character to register class. */
125 enum reg_class xtensa_char_to_class
[256] =
127 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
128 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
129 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
130 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
131 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
132 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
133 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
134 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
135 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
136 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
137 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
138 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
139 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
140 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
141 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
142 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
143 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
144 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
145 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
146 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
147 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
148 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
149 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
150 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
151 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
152 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
153 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
154 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
155 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
156 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
157 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
158 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
159 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
160 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
161 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
162 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
163 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
164 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
165 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
166 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
167 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
168 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
169 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
170 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
171 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
172 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
173 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
174 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
175 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
176 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
177 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
178 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
179 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
180 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
181 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
182 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
183 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
184 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
185 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
186 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
187 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
188 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
189 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
190 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
193 static int b4const_or_zero (int);
194 static enum internal_test
map_test_to_internal_test (enum rtx_code
);
195 static rtx
gen_int_relational (enum rtx_code
, rtx
, rtx
, int *);
196 static rtx
gen_float_relational (enum rtx_code
, rtx
, rtx
);
197 static rtx
gen_conditional_move (rtx
);
198 static rtx
fixup_subreg_mem (rtx
);
199 static enum machine_mode
xtensa_find_mode_for_size (unsigned);
200 static struct machine_function
* xtensa_init_machine_status (void);
201 static void printx (FILE *, signed int);
202 static void xtensa_function_epilogue (FILE *, HOST_WIDE_INT
);
203 static unsigned int xtensa_multibss_section_type_flags (tree
, const char *,
204 int) ATTRIBUTE_UNUSED
;
205 static void xtensa_select_rtx_section (enum machine_mode
, rtx
,
206 unsigned HOST_WIDE_INT
);
207 static bool xtensa_rtx_costs (rtx
, int, int, int *);
208 static tree
xtensa_build_builtin_va_list (void);
210 static int current_function_arg_words
;
211 static const int reg_nonleaf_alloc_order
[FIRST_PSEUDO_REGISTER
] =
215 /* This macro generates the assembly code for function exit,
216 on machines that need it. If FUNCTION_EPILOGUE is not defined
217 then individual return instructions are generated for each
218 return statement. Args are same as for FUNCTION_PROLOGUE. */
220 #undef TARGET_ASM_FUNCTION_EPILOGUE
221 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
223 /* These hooks specify assembly directives for creating certain kinds
224 of integer object. */
226 #undef TARGET_ASM_ALIGNED_SI_OP
227 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
229 #undef TARGET_ASM_SELECT_RTX_SECTION
230 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
232 #undef TARGET_RTX_COSTS
233 #define TARGET_RTX_COSTS xtensa_rtx_costs
234 #undef TARGET_ADDRESS_COST
235 #define TARGET_ADDRESS_COST hook_int_rtx_0
237 #undef TARGET_BUILD_BUILTIN_VA_LIST
238 #define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list
240 struct gcc_target targetm
= TARGET_INITIALIZER
;
244 * Functions to test Xtensa immediate operand validity.
248 xtensa_b4constu (int v
)
/* Return nonzero if V fits the Xtensa "signed 8-bit immediate shifted
   by 8" field: a multiple of 256 in the range [-32768, 32512].  */

int
xtensa_simm8x256 (int v)
{
  /* Reject values that are not a multiple of 256.  */
  if (v & 0xff)
    return 0;

  return v >= -32768 && v <= 32512;
}
/* Return nonzero if V is a valid immediate for an ADDI.N-style
   4-bit immediate: -1, or any value in [1, 15].  */

int
xtensa_ai4const (int v)
{
  if (v == -1)
    return 1;

  return v >= 1 && v <= 15;
}
288 return v
>= -32 && v
<= 95;
292 xtensa_b4const (int v
)
320 return v
>= -128 && v
<= 127;
326 return (v
>= 7 && v
<= 22);
/* Return nonzero if V is a valid offset for an L32I.N/S32I.N-style
   access: a multiple of 4 in the range [0, 60].  */

int
xtensa_lsi4x4 (int v)
{
  /* Range check first: offsets are unsigned and limited to 60.  */
  if (v < 0 || v > 60)
    return 0;

  return (v % 4) == 0;
}
/* Return nonzero if V fits in a signed 12-bit immediate field
   (the MOVI instruction's immediate): [-2048, 2047].  */

int
xtensa_simm12b (int v)
{
  const int lo = -2048;
  const int hi = 2047;

  return !(v < lo || v > hi);
}
344 return v
>= 0 && v
<= 255;
/* Return nonzero if V is a valid 16-bit access offset: an even
   value in the range [0, 510] (unsigned 8-bit field scaled by 2).  */

int
xtensa_uimm8x2 (int v)
{
  /* Must be halfword-aligned.  */
  if (v & 1)
    return 0;

  return v >= 0 && v <= 510;
}
/* Return nonzero if V is a valid 32-bit access offset: a multiple
   of 4 in the range [0, 1020] (unsigned 8-bit field scaled by 4).  */

int
xtensa_uimm8x4 (int v)
{
  /* Range check: the scaled unsigned field tops out at 255 * 4.  */
  if (v < 0 || v > 1020)
    return 0;

  /* Must be word-aligned.  */
  return (v & 3) == 0;
}
360 /* This is just like the standard true_regnum() function except that it
361 works even when reg_renumber is not initialized. */
364 xt_true_regnum (rtx x
)
366 if (GET_CODE (x
) == REG
)
369 && REGNO (x
) >= FIRST_PSEUDO_REGISTER
370 && reg_renumber
[REGNO (x
)] >= 0)
371 return reg_renumber
[REGNO (x
)];
374 if (GET_CODE (x
) == SUBREG
)
376 int base
= xt_true_regnum (SUBREG_REG (x
));
377 if (base
>= 0 && base
< FIRST_PSEUDO_REGISTER
)
378 return base
+ subreg_regno_offset (REGNO (SUBREG_REG (x
)),
379 GET_MODE (SUBREG_REG (x
)),
380 SUBREG_BYTE (x
), GET_MODE (x
));
387 add_operand (rtx op
, enum machine_mode mode
)
389 if (GET_CODE (op
) == CONST_INT
)
390 return (xtensa_simm8 (INTVAL (op
)) || xtensa_simm8x256 (INTVAL (op
)));
392 return register_operand (op
, mode
);
397 arith_operand (rtx op
, enum machine_mode mode
)
399 if (GET_CODE (op
) == CONST_INT
)
400 return xtensa_simm8 (INTVAL (op
));
402 return register_operand (op
, mode
);
407 nonimmed_operand (rtx op
, enum machine_mode mode
)
409 /* We cannot use the standard nonimmediate_operand() predicate because
410 it includes constant pool memory operands. */
412 if (memory_operand (op
, mode
))
413 return !constantpool_address_p (XEXP (op
, 0));
415 return register_operand (op
, mode
);
420 mem_operand (rtx op
, enum machine_mode mode
)
422 /* We cannot use the standard memory_operand() predicate because
423 it includes constant pool memory operands. */
425 if (memory_operand (op
, mode
))
426 return !constantpool_address_p (XEXP (op
, 0));
433 xtensa_valid_move (enum machine_mode mode
, rtx
*operands
)
435 /* Either the destination or source must be a register, and the
436 MAC16 accumulator doesn't count. */
438 if (register_operand (operands
[0], mode
))
440 int dst_regnum
= xt_true_regnum (operands
[0]);
442 /* The stack pointer can only be assigned with a MOVSP opcode. */
443 if (dst_regnum
== STACK_POINTER_REGNUM
)
444 return (mode
== SImode
445 && register_operand (operands
[1], mode
)
446 && !ACC_REG_P (xt_true_regnum (operands
[1])));
448 if (!ACC_REG_P (dst_regnum
))
451 if (register_operand (operands
[1], mode
))
453 int src_regnum
= xt_true_regnum (operands
[1]);
454 if (!ACC_REG_P (src_regnum
))
462 mask_operand (rtx op
, enum machine_mode mode
)
464 if (GET_CODE (op
) == CONST_INT
)
465 return xtensa_mask_immediate (INTVAL (op
));
467 return register_operand (op
, mode
);
472 extui_fldsz_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
474 return ((GET_CODE (op
) == CONST_INT
)
475 && xtensa_mask_immediate ((1 << INTVAL (op
)) - 1));
480 sext_operand (rtx op
, enum machine_mode mode
)
483 return nonimmed_operand (op
, mode
);
484 return mem_operand (op
, mode
);
489 sext_fldsz_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
491 return ((GET_CODE (op
) == CONST_INT
) && xtensa_tp7 (INTVAL (op
) - 1));
496 lsbitnum_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
498 if (GET_CODE (op
) == CONST_INT
)
500 return (BITS_BIG_ENDIAN
501 ? (INTVAL (op
) == BITS_PER_WORD
-1)
502 : (INTVAL (op
) == 0));
509 b4const_or_zero (int v
)
513 return xtensa_b4const (v
);
518 branch_operand (rtx op
, enum machine_mode mode
)
520 if (GET_CODE (op
) == CONST_INT
)
521 return b4const_or_zero (INTVAL (op
));
523 return register_operand (op
, mode
);
528 ubranch_operand (rtx op
, enum machine_mode mode
)
530 if (GET_CODE (op
) == CONST_INT
)
531 return xtensa_b4constu (INTVAL (op
));
533 return register_operand (op
, mode
);
538 call_insn_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
540 if ((GET_CODE (op
) == REG
)
541 && (op
!= arg_pointer_rtx
)
542 && ((REGNO (op
) < FRAME_POINTER_REGNUM
)
543 || (REGNO (op
) > LAST_VIRTUAL_REGISTER
)))
546 if (CONSTANT_ADDRESS_P (op
))
548 /* Direct calls only allowed to static functions with PIC. */
551 tree callee
, callee_sec
, caller_sec
;
553 if (GET_CODE (op
) != SYMBOL_REF
|| !SYMBOL_REF_LOCAL_P (op
))
556 /* Don't attempt a direct call if the callee is known to be in
557 a different section, since there's a good chance it will be
560 if (flag_function_sections
561 || DECL_ONE_ONLY (current_function_decl
))
563 caller_sec
= DECL_SECTION_NAME (current_function_decl
);
564 callee
= SYMBOL_REF_DECL (op
);
567 if (DECL_ONE_ONLY (callee
))
569 callee_sec
= DECL_SECTION_NAME (callee
);
570 if (((caller_sec
== NULL_TREE
) ^ (callee_sec
== NULL_TREE
))
571 || (caller_sec
!= NULL_TREE
572 && strcmp (TREE_STRING_POINTER (caller_sec
),
573 TREE_STRING_POINTER (callee_sec
)) != 0))
576 else if (caller_sec
!= NULL_TREE
)
587 move_operand (rtx op
, enum machine_mode mode
)
589 if (register_operand (op
, mode
)
590 || memory_operand (op
, mode
))
597 return TARGET_CONST16
&& CONSTANT_P (op
);
602 return CONSTANT_P (op
);
607 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
609 if (GET_CODE (op
) == CONSTANT_P_RTX
)
612 if (GET_CODE (op
) == CONST_INT
&& xtensa_simm12b (INTVAL (op
)))
625 smalloffset_mem_p (rtx op
)
627 if (GET_CODE (op
) == MEM
)
629 rtx addr
= XEXP (op
, 0);
630 if (GET_CODE (addr
) == REG
)
631 return REG_OK_FOR_BASE_P (addr
);
632 if (GET_CODE (addr
) == PLUS
)
634 rtx offset
= XEXP (addr
, 0);
635 if (GET_CODE (offset
) != CONST_INT
)
636 offset
= XEXP (addr
, 1);
637 if (GET_CODE (offset
) != CONST_INT
)
639 return xtensa_lsi4x4 (INTVAL (offset
));
647 constantpool_address_p (rtx addr
)
651 if (GET_CODE (addr
) == CONST
)
655 /* Only handle (PLUS (SYM, OFFSET)) form. */
656 addr
= XEXP (addr
, 0);
657 if (GET_CODE (addr
) != PLUS
)
660 /* Make sure the address is word aligned. */
661 offset
= XEXP (addr
, 1);
662 if ((GET_CODE (offset
) != CONST_INT
)
663 || ((INTVAL (offset
) & 3) != 0))
666 sym
= XEXP (addr
, 0);
669 if ((GET_CODE (sym
) == SYMBOL_REF
)
670 && CONSTANT_POOL_ADDRESS_P (sym
))
677 constantpool_mem_p (rtx op
)
679 if (GET_CODE (op
) == MEM
)
680 return constantpool_address_p (XEXP (op
, 0));
685 /* Accept the floating point constant 1 in the appropriate mode. */
688 const_float_1_operand (rtx op
, enum machine_mode mode
)
691 static REAL_VALUE_TYPE onedf
;
692 static REAL_VALUE_TYPE onesf
;
693 static int one_initialized
;
695 if ((GET_CODE (op
) != CONST_DOUBLE
)
696 || (mode
!= GET_MODE (op
))
697 || (mode
!= DFmode
&& mode
!= SFmode
))
700 REAL_VALUE_FROM_CONST_DOUBLE (d
, op
);
702 if (! one_initialized
)
704 onedf
= REAL_VALUE_ATOF ("1.0", DFmode
);
705 onesf
= REAL_VALUE_ATOF ("1.0", SFmode
);
706 one_initialized
= TRUE
;
710 return REAL_VALUES_EQUAL (d
, onedf
);
712 return REAL_VALUES_EQUAL (d
, onesf
);
717 fpmem_offset_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
719 if (GET_CODE (op
) == CONST_INT
)
720 return xtensa_mem_offset (INTVAL (op
), SFmode
);
726 xtensa_extend_reg (rtx dst
, rtx src
)
728 rtx temp
= gen_reg_rtx (SImode
);
729 rtx shift
= GEN_INT (BITS_PER_WORD
- GET_MODE_BITSIZE (GET_MODE (src
)));
731 /* Generate paradoxical subregs as needed so that the modes match. */
732 src
= simplify_gen_subreg (SImode
, src
, GET_MODE (src
), 0);
733 dst
= simplify_gen_subreg (SImode
, dst
, GET_MODE (dst
), 0);
735 emit_insn (gen_ashlsi3 (temp
, src
, shift
));
736 emit_insn (gen_ashrsi3 (dst
, temp
, shift
));
741 branch_operator (rtx x
, enum machine_mode mode
)
743 if (GET_MODE (x
) != mode
)
746 switch (GET_CODE (x
))
761 ubranch_operator (rtx x
, enum machine_mode mode
)
763 if (GET_MODE (x
) != mode
)
766 switch (GET_CODE (x
))
779 boolean_operator (rtx x
, enum machine_mode mode
)
781 if (GET_MODE (x
) != mode
)
784 switch (GET_CODE (x
))
797 xtensa_mask_immediate (int v
)
799 #define MAX_MASK_SIZE 16
802 for (mask_size
= 1; mask_size
<= MAX_MASK_SIZE
; mask_size
++)
816 xtensa_mem_offset (unsigned v
, enum machine_mode mode
)
821 /* Handle the worst case for block moves. See xtensa_expand_block_move
822 where we emit an optimized block move operation if the block can be
823 moved in < "move_ratio" pieces. The worst case is when the block is
824 aligned but has a size of (3 mod 4) (does this happen?) so that the
825 last piece requires a byte load/store. */
826 return (xtensa_uimm8 (v
)
827 && xtensa_uimm8 (v
+ MOVE_MAX
* LARGEST_MOVE_RATIO
));
830 return xtensa_uimm8 (v
);
833 return xtensa_uimm8x2 (v
);
836 return (xtensa_uimm8x4 (v
) && xtensa_uimm8x4 (v
+ 4));
842 return xtensa_uimm8x4 (v
);
846 /* Make normal rtx_code into something we can index from an array. */
848 static enum internal_test
849 map_test_to_internal_test (enum rtx_code test_code
)
851 enum internal_test test
= ITEST_MAX
;
856 case EQ
: test
= ITEST_EQ
; break;
857 case NE
: test
= ITEST_NE
; break;
858 case GT
: test
= ITEST_GT
; break;
859 case GE
: test
= ITEST_GE
; break;
860 case LT
: test
= ITEST_LT
; break;
861 case LE
: test
= ITEST_LE
; break;
862 case GTU
: test
= ITEST_GTU
; break;
863 case GEU
: test
= ITEST_GEU
; break;
864 case LTU
: test
= ITEST_LTU
; break;
865 case LEU
: test
= ITEST_LEU
; break;
872 /* Generate the code to compare two integer values. The return value is
873 the comparison expression. */
876 gen_int_relational (enum rtx_code test_code
, /* relational test (EQ, etc) */
877 rtx cmp0
, /* first operand to compare */
878 rtx cmp1
, /* second operand to compare */
879 int *p_invert
/* whether branch needs to reverse test */)
883 enum rtx_code test_code
; /* test code to use in insn */
884 int (*const_range_p
) (int); /* predicate function to check range */
885 int const_add
; /* constant to add (convert LE -> LT) */
886 int reverse_regs
; /* reverse registers in test */
887 int invert_const
; /* != 0 if invert value if cmp1 is constant */
888 int invert_reg
; /* != 0 if invert value if cmp1 is register */
889 int unsignedp
; /* != 0 for unsigned comparisons. */
892 static struct cmp_info info
[ (int)ITEST_MAX
] = {
894 { EQ
, b4const_or_zero
, 0, 0, 0, 0, 0 }, /* EQ */
895 { NE
, b4const_or_zero
, 0, 0, 0, 0, 0 }, /* NE */
897 { LT
, b4const_or_zero
, 1, 1, 1, 0, 0 }, /* GT */
898 { GE
, b4const_or_zero
, 0, 0, 0, 0, 0 }, /* GE */
899 { LT
, b4const_or_zero
, 0, 0, 0, 0, 0 }, /* LT */
900 { GE
, b4const_or_zero
, 1, 1, 1, 0, 0 }, /* LE */
902 { LTU
, xtensa_b4constu
, 1, 1, 1, 0, 1 }, /* GTU */
903 { GEU
, xtensa_b4constu
, 0, 0, 0, 0, 1 }, /* GEU */
904 { LTU
, xtensa_b4constu
, 0, 0, 0, 0, 1 }, /* LTU */
905 { GEU
, xtensa_b4constu
, 1, 1, 1, 0, 1 }, /* LEU */
908 enum internal_test test
;
909 enum machine_mode mode
;
910 struct cmp_info
*p_info
;
912 test
= map_test_to_internal_test (test_code
);
913 if (test
== ITEST_MAX
)
916 p_info
= &info
[ (int)test
];
918 mode
= GET_MODE (cmp0
);
919 if (mode
== VOIDmode
)
920 mode
= GET_MODE (cmp1
);
922 /* Make sure we can handle any constants given to us. */
923 if (GET_CODE (cmp1
) == CONST_INT
)
925 HOST_WIDE_INT value
= INTVAL (cmp1
);
926 unsigned HOST_WIDE_INT uvalue
= (unsigned HOST_WIDE_INT
)value
;
928 /* if the immediate overflows or does not fit in the immediate field,
929 spill it to a register */
931 if ((p_info
->unsignedp
?
932 (uvalue
+ p_info
->const_add
> uvalue
) :
933 (value
+ p_info
->const_add
> value
)) != (p_info
->const_add
> 0))
935 cmp1
= force_reg (mode
, cmp1
);
937 else if (!(p_info
->const_range_p
) (value
+ p_info
->const_add
))
939 cmp1
= force_reg (mode
, cmp1
);
942 else if ((GET_CODE (cmp1
) != REG
) && (GET_CODE (cmp1
) != SUBREG
))
944 cmp1
= force_reg (mode
, cmp1
);
947 /* See if we need to invert the result. */
948 *p_invert
= ((GET_CODE (cmp1
) == CONST_INT
)
949 ? p_info
->invert_const
950 : p_info
->invert_reg
);
952 /* Comparison to constants, may involve adding 1 to change a LT into LE.
953 Comparison between two registers, may involve switching operands. */
954 if (GET_CODE (cmp1
) == CONST_INT
)
956 if (p_info
->const_add
!= 0)
957 cmp1
= GEN_INT (INTVAL (cmp1
) + p_info
->const_add
);
960 else if (p_info
->reverse_regs
)
967 return gen_rtx (p_info
->test_code
, VOIDmode
, cmp0
, cmp1
);
971 /* Generate the code to compare two float values. The return value is
972 the comparison expression. */
975 gen_float_relational (enum rtx_code test_code
, /* relational test (EQ, etc) */
976 rtx cmp0
, /* first operand to compare */
977 rtx cmp1
/* second operand to compare */)
979 rtx (*gen_fn
) (rtx
, rtx
, rtx
);
981 int reverse_regs
, invert
;
985 case EQ
: reverse_regs
= 0; invert
= 0; gen_fn
= gen_seq_sf
; break;
986 case NE
: reverse_regs
= 0; invert
= 1; gen_fn
= gen_seq_sf
; break;
987 case LE
: reverse_regs
= 0; invert
= 0; gen_fn
= gen_sle_sf
; break;
988 case GT
: reverse_regs
= 1; invert
= 0; gen_fn
= gen_slt_sf
; break;
989 case LT
: reverse_regs
= 0; invert
= 0; gen_fn
= gen_slt_sf
; break;
990 case GE
: reverse_regs
= 1; invert
= 0; gen_fn
= gen_sle_sf
; break;
992 fatal_insn ("bad test", gen_rtx (test_code
, VOIDmode
, cmp0
, cmp1
));
993 reverse_regs
= 0; invert
= 0; gen_fn
= 0; /* avoid compiler warnings */
1003 brtmp
= gen_rtx_REG (CCmode
, FPCC_REGNUM
);
1004 emit_insn (gen_fn (brtmp
, cmp0
, cmp1
));
1006 return gen_rtx (invert
? EQ
: NE
, VOIDmode
, brtmp
, const0_rtx
);
1011 xtensa_expand_conditional_branch (rtx
*operands
, enum rtx_code test_code
)
1013 enum cmp_type type
= branch_type
;
1014 rtx cmp0
= branch_cmp
[0];
1015 rtx cmp1
= branch_cmp
[1];
1024 fatal_insn ("bad test", gen_rtx (test_code
, VOIDmode
, cmp0
, cmp1
));
1028 cmp
= gen_int_relational (test_code
, cmp0
, cmp1
, &invert
);
1032 if (!TARGET_HARD_FLOAT
)
1033 fatal_insn ("bad test", gen_rtx (test_code
, VOIDmode
, cmp0
, cmp1
));
1035 cmp
= gen_float_relational (test_code
, cmp0
, cmp1
);
1039 /* Generate the branch. */
1041 label1
= gen_rtx_LABEL_REF (VOIDmode
, operands
[0]);
1050 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
1051 gen_rtx_IF_THEN_ELSE (VOIDmode
, cmp
,
1058 gen_conditional_move (rtx cmp
)
1060 enum rtx_code code
= GET_CODE (cmp
);
1061 rtx op0
= branch_cmp
[0];
1062 rtx op1
= branch_cmp
[1];
1064 if (branch_type
== CMP_SI
)
1066 /* Jump optimization calls get_condition() which canonicalizes
1067 comparisons like (GE x <const>) to (GT x <const-1>).
1068 Transform those comparisons back to GE, since that is the
1069 comparison supported in Xtensa. We shouldn't have to
1070 transform <LE x const> comparisons, because neither
1071 xtensa_expand_conditional_branch() nor get_condition() will
1074 if ((code
== GT
) && (op1
== constm1_rtx
))
1079 cmp
= gen_rtx (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
1081 if (boolean_operator (cmp
, VOIDmode
))
1083 /* Swap the operands to make const0 second. */
1084 if (op0
== const0_rtx
)
1090 /* If not comparing against zero, emit a comparison (subtract). */
1091 if (op1
!= const0_rtx
)
1093 op0
= expand_binop (SImode
, sub_optab
, op0
, op1
,
1094 0, 0, OPTAB_LIB_WIDEN
);
1098 else if (branch_operator (cmp
, VOIDmode
))
1100 /* Swap the operands to make const0 second. */
1101 if (op0
== const0_rtx
)
1108 case LT
: code
= GE
; break;
1109 case GE
: code
= LT
; break;
1114 if (op1
!= const0_rtx
)
1120 return gen_rtx (code
, VOIDmode
, op0
, op1
);
1123 if (TARGET_HARD_FLOAT
&& (branch_type
== CMP_SF
))
1124 return gen_float_relational (code
, op0
, op1
);
1131 xtensa_expand_conditional_move (rtx
*operands
, int isflt
)
1134 rtx (*gen_fn
) (rtx
, rtx
, rtx
, rtx
, rtx
);
1136 if (!(cmp
= gen_conditional_move (operands
[1])))
1140 gen_fn
= (branch_type
== CMP_SI
1141 ? gen_movsfcc_internal0
1142 : gen_movsfcc_internal1
);
1144 gen_fn
= (branch_type
== CMP_SI
1145 ? gen_movsicc_internal0
1146 : gen_movsicc_internal1
);
1148 emit_insn (gen_fn (operands
[0], XEXP (cmp
, 0),
1149 operands
[2], operands
[3], cmp
));
1155 xtensa_expand_scc (rtx
*operands
)
1157 rtx dest
= operands
[0];
1158 rtx cmp
= operands
[1];
1159 rtx one_tmp
, zero_tmp
;
1160 rtx (*gen_fn
) (rtx
, rtx
, rtx
, rtx
, rtx
);
1162 if (!(cmp
= gen_conditional_move (cmp
)))
1165 one_tmp
= gen_reg_rtx (SImode
);
1166 zero_tmp
= gen_reg_rtx (SImode
);
1167 emit_insn (gen_movsi (one_tmp
, const_true_rtx
));
1168 emit_insn (gen_movsi (zero_tmp
, const0_rtx
));
1170 gen_fn
= (branch_type
== CMP_SI
1171 ? gen_movsicc_internal0
1172 : gen_movsicc_internal1
);
1173 emit_insn (gen_fn (dest
, XEXP (cmp
, 0), one_tmp
, zero_tmp
, cmp
));
1178 /* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1]. MODE is
1179 for the output, i.e., the input operands are twice as big as MODE. */
1182 xtensa_split_operand_pair (rtx operands
[4], enum machine_mode mode
)
1184 switch (GET_CODE (operands
[1]))
1187 operands
[3] = gen_rtx_REG (mode
, REGNO (operands
[1]) + 1);
1188 operands
[2] = gen_rtx_REG (mode
, REGNO (operands
[1]));
1192 operands
[3] = adjust_address (operands
[1], mode
, GET_MODE_SIZE (mode
));
1193 operands
[2] = adjust_address (operands
[1], mode
, 0);
1198 split_double (operands
[1], &operands
[2], &operands
[3]);
1205 switch (GET_CODE (operands
[0]))
1208 operands
[1] = gen_rtx_REG (mode
, REGNO (operands
[0]) + 1);
1209 operands
[0] = gen_rtx_REG (mode
, REGNO (operands
[0]));
1213 operands
[1] = adjust_address (operands
[0], mode
, GET_MODE_SIZE (mode
));
1214 operands
[0] = adjust_address (operands
[0], mode
, 0);
1223 /* Emit insns to move operands[1] into operands[0].
1224 Return 1 if we have written out everything that needs to be done to
1225 do the move. Otherwise, return 0 and the caller will emit the move
1229 xtensa_emit_move_sequence (rtx
*operands
, enum machine_mode mode
)
1231 if (CONSTANT_P (operands
[1])
1232 && GET_CODE (operands
[1]) != CONSTANT_P_RTX
1233 && (GET_CODE (operands
[1]) != CONST_INT
1234 || !xtensa_simm12b (INTVAL (operands
[1]))))
1236 if (!TARGET_CONST16
)
1237 operands
[1] = force_const_mem (SImode
, operands
[1]);
1239 /* PC-relative loads are always SImode, and CONST16 is only
1240 supported in the movsi pattern, so add a SUBREG for any other
1245 if (register_operand (operands
[0], mode
))
1247 operands
[0] = simplify_gen_subreg (SImode
, operands
[0], mode
, 0);
1248 emit_move_insn (operands
[0], operands
[1]);
1253 operands
[1] = force_reg (SImode
, operands
[1]);
1254 operands
[1] = gen_lowpart_SUBREG (mode
, operands
[1]);
1259 if (!(reload_in_progress
| reload_completed
))
1261 if (!xtensa_valid_move (mode
, operands
))
1262 operands
[1] = force_reg (mode
, operands
[1]);
1264 if (xtensa_copy_incoming_a7 (operands
, mode
))
1268 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1269 instruction won't be recognized after reload, so we remove the
1270 subreg and adjust mem accordingly. */
1271 if (reload_in_progress
)
1273 operands
[0] = fixup_subreg_mem (operands
[0]);
1274 operands
[1] = fixup_subreg_mem (operands
[1]);
1281 fixup_subreg_mem (rtx x
)
1283 if (GET_CODE (x
) == SUBREG
1284 && GET_CODE (SUBREG_REG (x
)) == REG
1285 && REGNO (SUBREG_REG (x
)) >= FIRST_PSEUDO_REGISTER
)
1288 gen_rtx_SUBREG (GET_MODE (x
),
1289 reg_equiv_mem
[REGNO (SUBREG_REG (x
))],
1291 x
= alter_subreg (&temp
);
1297 /* Check if this move is copying an incoming argument in a7. If so,
1298 emit the move, followed by the special "set_frame_ptr"
1299 unspec_volatile insn, at the very beginning of the function. This
1300 is necessary because the register allocator will ignore conflicts
1301 with a7 and may assign some other pseudo to a7. If that pseudo was
1302 assigned prior to this move, it would clobber the incoming argument
1303 in a7. By copying the argument out of a7 as the very first thing,
1304 and then immediately following that with an unspec_volatile to keep
1305 the scheduler away, we should avoid any problems. */
1308 xtensa_copy_incoming_a7 (rtx
*operands
, enum machine_mode mode
)
1310 if (a7_overlap_mentioned_p (operands
[1])
1311 && !cfun
->machine
->incoming_a7_copied
)
1317 mov
= gen_movdf_internal (operands
[0], operands
[1]);
1320 mov
= gen_movsf_internal (operands
[0], operands
[1]);
1323 mov
= gen_movdi_internal (operands
[0], operands
[1]);
1326 mov
= gen_movsi_internal (operands
[0], operands
[1]);
1329 mov
= gen_movhi_internal (operands
[0], operands
[1]);
1332 mov
= gen_movqi_internal (operands
[0], operands
[1]);
1338 /* Insert the instructions before any other argument copies.
1339 (The set_frame_ptr insn comes _after_ the move, so push it
1341 push_topmost_sequence ();
1342 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1343 emit_insn_after (mov
, get_insns ());
1344 pop_topmost_sequence ();
1346 /* Ideally the incoming argument in a7 would only be copied
1347 once, since propagating a7 into the body of a function
1348 will almost certainly lead to errors. However, there is
1349 at least one harmless case (in GCSE) where the original
1350 copy from a7 is changed to copy into a new pseudo. Thus,
1351 we use a flag to only do this special treatment for the
1352 first copy of a7. */
1354 cfun
->machine
->incoming_a7_copied
= true;
1363 /* Try to expand a block move operation to an RTL block move instruction.
1364 If not optimizing or if the block size is not a constant or if the
1365 block is small, the expansion fails and GCC falls back to calling
1368 operands[0] is the destination
1369 operands[1] is the source
1370 operands[2] is the length
1371 operands[3] is the alignment */
1374 xtensa_expand_block_move (rtx
*operands
)
1376 rtx dest
= operands
[0];
1377 rtx src
= operands
[1];
1378 int bytes
= INTVAL (operands
[2]);
1379 int align
= XINT (operands
[3], 0);
1380 int num_pieces
, move_ratio
;
1382 /* If this is not a fixed size move, just call memcpy. */
1383 if (!optimize
|| (GET_CODE (operands
[2]) != CONST_INT
))
1386 /* Anything to move? */
1390 if (align
> MOVE_MAX
)
1393 /* Decide whether to expand inline based on the optimization level. */
1396 move_ratio
= LARGEST_MOVE_RATIO
;
1397 num_pieces
= (bytes
/ align
) + (bytes
% align
); /* Close enough anyway. */
1398 if (num_pieces
>= move_ratio
)
1401 /* Make sure the memory addresses are valid. */
1402 operands
[0] = validize_mem (dest
);
1403 operands
[1] = validize_mem (src
);
1405 emit_insn (gen_movstrsi_internal (operands
[0], operands
[1],
1406 operands
[2], operands
[3]));
1411 /* Emit a sequence of instructions to implement a block move, trying
1412 to hide load delay slots as much as possible. Load N values into
1413 temporary registers, store those N values, and repeat until the
1414 complete block has been moved. N=delay_slots+1. */
1423 xtensa_emit_block_move (rtx
*operands
, rtx
*tmpregs
, int delay_slots
)
1425 rtx dest
= operands
[0];
1426 rtx src
= operands
[1];
1427 int bytes
= INTVAL (operands
[2]);
1428 int align
= XINT (operands
[3], 0);
1429 rtx from_addr
= XEXP (src
, 0);
1430 rtx to_addr
= XEXP (dest
, 0);
1431 int from_struct
= MEM_IN_STRUCT_P (src
);
1432 int to_struct
= MEM_IN_STRUCT_P (dest
);
1434 int chunk_size
, item_size
;
1435 struct meminsnbuf
*ldinsns
, *stinsns
;
1436 const char *ldname
, *stname
;
1437 enum machine_mode mode
;
1439 if (align
> MOVE_MAX
)
1442 chunk_size
= delay_slots
+ 1;
1444 ldinsns
= (struct meminsnbuf
*)
1445 alloca (chunk_size
* sizeof (struct meminsnbuf
));
1446 stinsns
= (struct meminsnbuf
*)
1447 alloca (chunk_size
* sizeof (struct meminsnbuf
));
1449 mode
= xtensa_find_mode_for_size (item_size
);
1450 item_size
= GET_MODE_SIZE (mode
);
1451 ldname
= xtensa_ld_opcodes
[(int) mode
];
1452 stname
= xtensa_st_opcodes
[(int) mode
];
1458 for (n
= 0; n
< chunk_size
; n
++)
1468 if (bytes
< item_size
)
1470 /* Find a smaller item_size which we can load & store. */
1472 mode
= xtensa_find_mode_for_size (item_size
);
1473 item_size
= GET_MODE_SIZE (mode
);
1474 ldname
= xtensa_ld_opcodes
[(int) mode
];
1475 stname
= xtensa_st_opcodes
[(int) mode
];
1478 /* Record the load instruction opcode and operands. */
1479 addr
= plus_constant (from_addr
, offset
);
1480 mem
= gen_rtx_MEM (mode
, addr
);
1481 if (! memory_address_p (mode
, addr
))
1483 MEM_IN_STRUCT_P (mem
) = from_struct
;
1484 ldinsns
[n
].operands
[0] = tmpregs
[n
];
1485 ldinsns
[n
].operands
[1] = mem
;
1486 sprintf (ldinsns
[n
].template, "%s\t%%0, %%1", ldname
);
1488 /* Record the store instruction opcode and operands. */
1489 addr
= plus_constant (to_addr
, offset
);
1490 mem
= gen_rtx_MEM (mode
, addr
);
1491 if (! memory_address_p (mode
, addr
))
1493 MEM_IN_STRUCT_P (mem
) = to_struct
;
1494 stinsns
[n
].operands
[0] = tmpregs
[n
];
1495 stinsns
[n
].operands
[1] = mem
;
1496 sprintf (stinsns
[n
].template, "%s\t%%0, %%1", stname
);
1498 offset
+= item_size
;
1502 /* Now output the loads followed by the stores. */
1503 for (n
= 0; n
< chunk_size
; n
++)
1504 output_asm_insn (ldinsns
[n
].template, ldinsns
[n
].operands
);
1505 for (n
= 0; n
< chunk_size
; n
++)
1506 output_asm_insn (stinsns
[n
].template, stinsns
[n
].operands
);
1511 static enum machine_mode
1512 xtensa_find_mode_for_size (unsigned item_size
)
1514 enum machine_mode mode
, tmode
;
1520 /* Find mode closest to but not bigger than item_size. */
1521 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1522 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1523 if (GET_MODE_SIZE (tmode
) <= item_size
)
1525 if (mode
== VOIDmode
)
1528 item_size
= GET_MODE_SIZE (mode
);
1530 if (xtensa_ld_opcodes
[(int) mode
]
1531 && xtensa_st_opcodes
[(int) mode
])
1534 /* Cannot load & store this mode; try something smaller. */
1543 xtensa_expand_nonlocal_goto (rtx
*operands
)
1545 rtx goto_handler
= operands
[1];
1546 rtx containing_fp
= operands
[3];
1548 /* Generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1549 is too big to generate in-line. */
1551 if (GET_CODE (containing_fp
) != REG
)
1552 containing_fp
= force_reg (Pmode
, containing_fp
);
1554 goto_handler
= replace_rtx (copy_rtx (goto_handler
),
1555 virtual_stack_vars_rtx
,
1558 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__xtensa_nonlocal_goto"),
1560 containing_fp
, Pmode
,
1561 goto_handler
, Pmode
);
1565 static struct machine_function
*
1566 xtensa_init_machine_status (void)
1568 return ggc_alloc_cleared (sizeof (struct machine_function
));
1573 xtensa_setup_frame_addresses (void)
1575 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1576 cfun
->machine
->accesses_prev_frame
= 1;
1579 (gen_rtx_SYMBOL_REF (Pmode
, "__xtensa_libgcc_window_spill"),
1584 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1585 a comment showing where the end of the loop is. However, if there is a
1586 label or a branch at the end of the loop then we need to place a nop
1587 there. If the loop ends with a label we need the nop so that branches
1588 targeting that label will target the nop (and thus remain in the loop),
1589 instead of targeting the instruction after the loop (and thus exiting
1590 the loop). If the loop ends with a branch, we need the nop in case the
1591 branch is targeting a location inside the loop. When the branch
1592 executes it will cause the loop count to be decremented even if it is
1593 taken (because it is the last instruction in the loop), so we need to
1594 nop after the branch to prevent the loop count from being decremented
1595 when the branch is taken. */
1598 xtensa_emit_loop_end (rtx insn
, rtx
*operands
)
1602 for (insn
= PREV_INSN (insn
); insn
&& !done
; insn
= PREV_INSN (insn
))
1604 switch (GET_CODE (insn
))
1611 output_asm_insn (TARGET_DENSITY
? "nop.n" : "nop", operands
);
1617 rtx body
= PATTERN (insn
);
1619 if (GET_CODE (body
) == JUMP_INSN
)
1621 output_asm_insn (TARGET_DENSITY
? "nop.n" : "nop", operands
);
1624 else if ((GET_CODE (body
) != USE
)
1625 && (GET_CODE (body
) != CLOBBER
))
1632 output_asm_insn ("# loop end for %0", operands
);
1637 xtensa_emit_call (int callop
, rtx
*operands
)
1639 static char result
[64];
1640 rtx tgt
= operands
[callop
];
1642 if (GET_CODE (tgt
) == CONST_INT
)
1643 sprintf (result
, "call8\t0x%lx", INTVAL (tgt
));
1644 else if (register_operand (tgt
, VOIDmode
))
1645 sprintf (result
, "callx8\t%%%d", callop
);
1647 sprintf (result
, "call8\t%%%d", callop
);
1653 /* Return the debugger register number to use for 'regno'. */
1656 xtensa_dbx_register_number (int regno
)
1660 if (GP_REG_P (regno
))
1662 regno
-= GP_REG_FIRST
;
1665 else if (BR_REG_P (regno
))
1667 regno
-= BR_REG_FIRST
;
1670 else if (FP_REG_P (regno
))
1672 regno
-= FP_REG_FIRST
;
1675 else if (ACC_REG_P (regno
))
1677 first
= 0x200; /* Start of Xtensa special registers. */
1678 regno
= 16; /* ACCLO is special register 16. */
1681 /* When optimizing, we sometimes get asked about pseudo-registers
1682 that don't represent hard registers. Return 0 for these. */
1686 return first
+ regno
;
1690 /* Argument support functions. */
1692 /* Initialize CUMULATIVE_ARGS for a function. */
1695 init_cumulative_args (CUMULATIVE_ARGS
*cum
,
1696 tree fntype ATTRIBUTE_UNUSED
,
1697 rtx libname ATTRIBUTE_UNUSED
)
1703 /* Advance the argument to the next argument position. */
1706 function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
, tree type
)
1711 arg_words
= &cum
->arg_words
;
1712 max
= MAX_ARGS_IN_REGISTERS
;
1714 words
= (((mode
!= BLKmode
)
1715 ? (int) GET_MODE_SIZE (mode
)
1716 : int_size_in_bytes (type
)) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
1718 if ((*arg_words
+ words
> max
) && (*arg_words
< max
))
1721 *arg_words
+= words
;
1725 /* Return an RTL expression containing the register for the given mode,
1726 or 0 if the argument is to be passed on the stack. INCOMING_P is nonzero
1727 if this is an incoming argument to the current function. */
1730 function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
, tree type
,
1733 int regbase
, words
, max
;
1736 enum machine_mode result_mode
;
1738 arg_words
= &cum
->arg_words
;
1739 regbase
= (incoming_p
? GP_ARG_FIRST
: GP_OUTGOING_ARG_FIRST
);
1740 max
= MAX_ARGS_IN_REGISTERS
;
1742 words
= (((mode
!= BLKmode
)
1743 ? (int) GET_MODE_SIZE (mode
)
1744 : int_size_in_bytes (type
)) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
1746 if (type
&& (TYPE_ALIGN (type
) > BITS_PER_WORD
))
1747 *arg_words
+= (*arg_words
& 1);
1749 if (*arg_words
+ words
> max
)
1752 regno
= regbase
+ *arg_words
;
1753 result_mode
= (mode
== BLKmode
? TYPE_MODE (type
) : mode
);
1755 /* We need to make sure that references to a7 are represented with
1756 rtx that is not equal to hard_frame_pointer_rtx. For multi-word
1757 modes for which we don't define move patterns, we can't control
1758 the expansion unless we explicitly list the individual registers
1761 if (mode
!= DImode
&& mode
!= DFmode
1763 && regno
+ words
> A7_REG
)
1768 result
= gen_rtx_PARALLEL (result_mode
, rtvec_alloc (words
));
1769 for (n
= 0; n
< words
; n
++)
1771 XVECEXP (result
, 0, n
) =
1772 gen_rtx_EXPR_LIST (VOIDmode
,
1773 gen_raw_REG (SImode
, regno
+ n
),
1774 GEN_INT (n
* UNITS_PER_WORD
));
1779 return gen_raw_REG (result_mode
, regno
);
1784 override_options (void)
1787 enum machine_mode mode
;
1789 if (!TARGET_BOOLEANS
&& TARGET_HARD_FLOAT
)
1790 error ("boolean registers required for the floating-point option");
1792 /* Set up the tables of ld/st opcode names for block moves. */
1793 xtensa_ld_opcodes
[(int) SImode
] = "l32i";
1794 xtensa_ld_opcodes
[(int) HImode
] = "l16ui";
1795 xtensa_ld_opcodes
[(int) QImode
] = "l8ui";
1796 xtensa_st_opcodes
[(int) SImode
] = "s32i";
1797 xtensa_st_opcodes
[(int) HImode
] = "s16i";
1798 xtensa_st_opcodes
[(int) QImode
] = "s8i";
1800 xtensa_char_to_class
['q'] = SP_REG
;
1801 xtensa_char_to_class
['a'] = GR_REGS
;
1802 xtensa_char_to_class
['b'] = ((TARGET_BOOLEANS
) ? BR_REGS
: NO_REGS
);
1803 xtensa_char_to_class
['f'] = ((TARGET_HARD_FLOAT
) ? FP_REGS
: NO_REGS
);
1804 xtensa_char_to_class
['A'] = ((TARGET_MAC16
) ? ACC_REG
: NO_REGS
);
1805 xtensa_char_to_class
['B'] = ((TARGET_SEXT
) ? GR_REGS
: NO_REGS
);
1806 xtensa_char_to_class
['C'] = ((TARGET_MUL16
) ? GR_REGS
: NO_REGS
);
1807 xtensa_char_to_class
['D'] = ((TARGET_DENSITY
) ? GR_REGS
: NO_REGS
);
1808 xtensa_char_to_class
['d'] = ((TARGET_DENSITY
) ? AR_REGS
: NO_REGS
);
1809 xtensa_char_to_class
['W'] = ((TARGET_CONST16
) ? GR_REGS
: NO_REGS
);
1811 /* Set up array giving whether a given register can hold a given mode. */
1812 for (mode
= VOIDmode
;
1813 mode
!= MAX_MACHINE_MODE
;
1814 mode
= (enum machine_mode
) ((int) mode
+ 1))
1816 int size
= GET_MODE_SIZE (mode
);
1817 enum mode_class
class = GET_MODE_CLASS (mode
);
1819 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1823 if (ACC_REG_P (regno
))
1824 temp
= (TARGET_MAC16
1825 && (class == MODE_INT
) && (size
<= UNITS_PER_WORD
));
1826 else if (GP_REG_P (regno
))
1827 temp
= ((regno
& 1) == 0 || (size
<= UNITS_PER_WORD
));
1828 else if (FP_REG_P (regno
))
1829 temp
= (TARGET_HARD_FLOAT
&& (mode
== SFmode
));
1830 else if (BR_REG_P (regno
))
1831 temp
= (TARGET_BOOLEANS
&& (mode
== CCmode
));
1835 xtensa_hard_regno_mode_ok
[(int) mode
][regno
] = temp
;
1839 init_machine_status
= xtensa_init_machine_status
;
1841 /* Check PIC settings. PIC is only supported when using L32R
1842 instructions, and some targets need to always use PIC. */
1843 if (flag_pic
&& TARGET_CONST16
)
1844 error ("-f%s is not supported with CONST16 instructions",
1845 (flag_pic
> 1 ? "PIC" : "pic"));
1846 else if (XTENSA_ALWAYS_PIC
)
1849 error ("PIC is required but not supported with CONST16 instructions");
1852 /* There's no need for -fPIC (as opposed to -fpic) on Xtensa. */
1858 /* A C compound statement to output to stdio stream STREAM the
1859 assembler syntax for an instruction operand X. X is an RTL
1862 CODE is a value that can be used to specify one of several ways
1863 of printing the operand. It is used when identical operands
1864 must be printed differently depending on the context. CODE
1865 comes from the '%' specification that was used to request
1866 printing of the operand. If the specification was just '%DIGIT'
1867 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1868 is the ASCII code for LTR.
1870 If X is a register, this macro should print the register's name.
1871 The names can be found in an array 'reg_names' whose type is
1872 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1874 When the machine description has a specification '%PUNCT' (a '%'
1875 followed by a punctuation character), this macro is called with
1876 a null pointer for X and the punctuation character for CODE.
1878 'a', 'c', 'l', and 'n' are reserved.
1880 The Xtensa specific codes are:
1882 'd' CONST_INT, print as signed decimal
1883 'x' CONST_INT, print as signed hexadecimal
1884 'K' CONST_INT, print number of bits in mask for EXTUI
1885 'R' CONST_INT, print (X & 0x1f)
1886 'L' CONST_INT, print ((32 - X) & 0x1f)
1887 'D' REG, print second register of double-word register operand
1888 'N' MEM, print address of next word following a memory operand
1889 'v' MEM, if memory reference is volatile, output a MEMW before it
1890 't' any constant, add "@h" suffix for top 16 bits
1891 'b' any constant, add "@l" suffix for bottom 16 bits
/* Print the signed value VAL to FILE: small magnitudes (|val| < 10)
   in decimal, everything else in hexadecimal with an explicit sign.  */

static void
printx (FILE *file, signed int val)
{
  /* Print a hexadecimal value in a nice way.  */
  if ((val > -0xa) && (val < 0xa))
    fprintf (file, "%d", val);
  else if (val < 0)
    fprintf (file, "-0x%x", -val);
  else
    fprintf (file, "0x%x", val);
}
1908 print_operand (FILE *file
, rtx x
, int letter
)
1911 error ("PRINT_OPERAND null pointer");
1916 if (GET_CODE (x
) == REG
|| GET_CODE (x
) == SUBREG
)
1917 fprintf (file
, "%s", reg_names
[xt_true_regnum (x
) + 1]);
1919 output_operand_lossage ("invalid %%D value");
1923 if (GET_CODE (x
) == MEM
)
1925 /* For a volatile memory reference, emit a MEMW before the
1927 if (MEM_VOLATILE_P (x
))
1928 fprintf (file
, "memw\n\t");
1931 output_operand_lossage ("invalid %%v value");
1935 if (GET_CODE (x
) == MEM
1936 && (GET_MODE (x
) == DFmode
|| GET_MODE (x
) == DImode
))
1938 x
= adjust_address (x
, GET_MODE (x
) == DFmode
? SFmode
: SImode
, 4);
1939 output_address (XEXP (x
, 0));
1942 output_operand_lossage ("invalid %%N value");
1946 if (GET_CODE (x
) == CONST_INT
)
1949 unsigned val
= INTVAL (x
);
1955 if ((val
!= 0) || (num_bits
== 0) || (num_bits
> 16))
1956 fatal_insn ("invalid mask", x
);
1958 fprintf (file
, "%d", num_bits
);
1961 output_operand_lossage ("invalid %%K value");
1965 if (GET_CODE (x
) == CONST_INT
)
1966 fprintf (file
, "%ld", (32 - INTVAL (x
)) & 0x1f);
1968 output_operand_lossage ("invalid %%L value");
1972 if (GET_CODE (x
) == CONST_INT
)
1973 fprintf (file
, "%ld", INTVAL (x
) & 0x1f);
1975 output_operand_lossage ("invalid %%R value");
1979 if (GET_CODE (x
) == CONST_INT
)
1980 printx (file
, INTVAL (x
));
1982 output_operand_lossage ("invalid %%x value");
1986 if (GET_CODE (x
) == CONST_INT
)
1987 fprintf (file
, "%ld", INTVAL (x
));
1989 output_operand_lossage ("invalid %%d value");
1994 if (GET_CODE (x
) == CONST_INT
)
1996 printx (file
, INTVAL (x
));
1997 fputs (letter
== 't' ? "@h" : "@l", file
);
1999 else if (GET_CODE (x
) == CONST_DOUBLE
)
2002 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
2003 if (GET_MODE (x
) == SFmode
)
2006 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
2007 fprintf (file
, "0x%08lx@%c", l
, letter
== 't' ? 'h' : 'l');
2010 output_operand_lossage ("invalid %%t/%%b value");
2012 else if (GET_CODE (x
) == CONST
)
2014 /* X must be a symbolic constant on ELF. Write an expression
2015 suitable for 'const16' that sets the high or low 16 bits. */
2016 if (GET_CODE (XEXP (x
, 0)) != PLUS
2017 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
2018 && GET_CODE (XEXP (XEXP (x
, 0), 0)) != LABEL_REF
)
2019 || GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
2020 output_operand_lossage ("invalid %%t/%%b value");
2021 print_operand (file
, XEXP (XEXP (x
, 0), 0), 0);
2022 fputs (letter
== 't' ? "@h" : "@l", file
);
2023 /* There must be a non-alphanumeric character between 'h' or 'l'
2024 and the number. The '-' is added by print_operand() already. */
2025 if (INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0)
2027 print_operand (file
, XEXP (XEXP (x
, 0), 1), 0);
2031 output_addr_const (file
, x
);
2032 fputs (letter
== 't' ? "@h" : "@l", file
);
2037 if (GET_CODE (x
) == REG
|| GET_CODE (x
) == SUBREG
)
2038 fprintf (file
, "%s", reg_names
[xt_true_regnum (x
)]);
2039 else if (GET_CODE (x
) == MEM
)
2040 output_address (XEXP (x
, 0));
2041 else if (GET_CODE (x
) == CONST_INT
)
2042 fprintf (file
, "%ld", INTVAL (x
));
2044 output_addr_const (file
, x
);
2049 /* A C compound statement to output to stdio stream STREAM the
2050 assembler syntax for an instruction operand that is a memory
2051 reference whose address is ADDR. ADDR is an RTL expression. */
2054 print_operand_address (FILE *file
, rtx addr
)
2057 error ("PRINT_OPERAND_ADDRESS, null pointer");
2059 switch (GET_CODE (addr
))
2062 fatal_insn ("invalid address", addr
);
2066 fprintf (file
, "%s, 0", reg_names
[REGNO (addr
)]);
2072 rtx offset
= (rtx
)0;
2073 rtx arg0
= XEXP (addr
, 0);
2074 rtx arg1
= XEXP (addr
, 1);
2076 if (GET_CODE (arg0
) == REG
)
2081 else if (GET_CODE (arg1
) == REG
)
2087 fatal_insn ("no register in address", addr
);
2089 if (CONSTANT_P (offset
))
2091 fprintf (file
, "%s, ", reg_names
[REGNO (reg
)]);
2092 output_addr_const (file
, offset
);
2095 fatal_insn ("address offset not a constant", addr
);
2103 output_addr_const (file
, addr
);
2110 xtensa_output_literal (FILE *file
, rtx x
, enum machine_mode mode
, int labelno
)
2116 fprintf (file
, "\t.literal .LC%u, ", (unsigned) labelno
);
2118 switch (GET_MODE_CLASS (mode
))
2121 if (GET_CODE (x
) != CONST_DOUBLE
)
2124 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
2128 REAL_VALUE_TO_TARGET_SINGLE (r
, value_long
[0]);
2129 fprintf (file
, "0x%08lx\n", value_long
[0]);
2133 REAL_VALUE_TO_TARGET_DOUBLE (r
, value_long
);
2134 fprintf (file
, "0x%08lx, 0x%08lx\n",
2135 value_long
[0], value_long
[1]);
2145 case MODE_PARTIAL_INT
:
2146 size
= GET_MODE_SIZE (mode
);
2149 output_addr_const (file
, x
);
2154 output_addr_const (file
, operand_subword (x
, 0, 0, DImode
));
2156 output_addr_const (file
, operand_subword (x
, 1, 0, DImode
));
2169 /* Return the bytes needed to compute the frame pointer from the current
2172 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2173 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
2176 compute_frame_size (int size
)
2178 /* Add space for the incoming static chain value. */
2179 if (current_function_needs_context
)
2180 size
+= (1 * UNITS_PER_WORD
);
2182 xtensa_current_frame_size
=
2183 XTENSA_STACK_ALIGN (size
2184 + current_function_outgoing_args_size
2185 + (WINDOW_SIZE
* UNITS_PER_WORD
));
2186 return xtensa_current_frame_size
;
2191 xtensa_frame_pointer_required (void)
2193 /* The code to expand builtin_frame_addr and builtin_return_addr
2194 currently uses the hard_frame_pointer instead of frame_pointer.
2195 This seems wrong but maybe it's necessary for other architectures.
2196 This function is derived from the i386 code. */
2198 if (cfun
->machine
->accesses_prev_frame
)
2206 xtensa_expand_prologue (void)
2208 HOST_WIDE_INT total_size
;
2211 total_size
= compute_frame_size (get_frame_size ());
2212 size_rtx
= GEN_INT (total_size
);
2214 if (total_size
< (1 << (12+3)))
2215 emit_insn (gen_entry (size_rtx
, size_rtx
));
2218 /* Use a8 as a temporary since a0-a7 may be live. */
2219 rtx tmp_reg
= gen_rtx_REG (Pmode
, A8_REG
);
2220 emit_insn (gen_entry (size_rtx
, GEN_INT (MIN_FRAME_SIZE
)));
2221 emit_move_insn (tmp_reg
, GEN_INT (total_size
- MIN_FRAME_SIZE
));
2222 emit_insn (gen_subsi3 (tmp_reg
, stack_pointer_rtx
, tmp_reg
));
2223 emit_move_insn (stack_pointer_rtx
, tmp_reg
);
2226 if (frame_pointer_needed
)
2228 rtx first
, insn
, set_frame_ptr_insn
= 0;
2230 push_topmost_sequence ();
2231 first
= get_insns ();
2232 pop_topmost_sequence ();
2234 /* Search all instructions, looking for the insn that sets up the
2235 frame pointer. This search will fail if the function does not
2236 have an incoming argument in $a7, but in that case, we can just
2237 set up the frame pointer at the very beginning of the
2240 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
2247 pat
= PATTERN (insn
);
2248 if (GET_CODE (pat
) == SET
2249 && GET_CODE (SET_SRC (pat
)) == UNSPEC_VOLATILE
2250 && (XINT (SET_SRC (pat
), 1) == UNSPECV_SET_FP
))
2252 set_frame_ptr_insn
= insn
;
2257 if (set_frame_ptr_insn
)
2259 /* For all instructions prior to set_frame_ptr_insn, replace
2260 hard_frame_pointer references with stack_pointer. */
2262 insn
!= set_frame_ptr_insn
;
2263 insn
= NEXT_INSN (insn
))
2266 PATTERN (insn
) = replace_rtx (copy_rtx (PATTERN (insn
)),
2267 hard_frame_pointer_rtx
,
2272 emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
);
2277 /* Clear variables at function end. */
2280 xtensa_function_epilogue (FILE *file ATTRIBUTE_UNUSED
,
2281 HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
2283 xtensa_current_frame_size
= 0;
2288 xtensa_return_addr (int count
, rtx frame
)
2290 rtx result
, retaddr
;
2293 retaddr
= gen_rtx_REG (Pmode
, A0_REG
);
2296 rtx addr
= plus_constant (frame
, -4 * UNITS_PER_WORD
);
2297 addr
= memory_address (Pmode
, addr
);
2298 retaddr
= gen_reg_rtx (Pmode
);
2299 emit_move_insn (retaddr
, gen_rtx_MEM (Pmode
, addr
));
2302 /* The 2 most-significant bits of the return address on Xtensa hold
2303 the register window size. To get the real return address, these
2304 bits must be replaced with the high bits from the current PC. */
2306 result
= gen_reg_rtx (Pmode
);
2307 emit_insn (gen_fix_return_addr (result
, retaddr
));
2312 /* Create the va_list data type.
2313 This structure is set up by __builtin_saveregs. The __va_reg
2314 field points to a stack-allocated region holding the contents of the
2315 incoming argument registers. The __va_ndx field is an index initialized
2316 to the position of the first unnamed (variable) argument. This same index
2317 is also used to address the arguments passed in memory. Thus, the
2318 __va_stk field is initialized to point to the position of the first
2319 argument in memory offset to account for the arguments passed in
2320 registers. E.G., if there are 6 argument registers, and each register is
2321 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2322 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2323 argument word N for N >= 6. */
2326 xtensa_build_builtin_va_list (void)
2328 tree f_stk
, f_reg
, f_ndx
, record
, type_decl
;
2330 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
2331 type_decl
= build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
2333 f_stk
= build_decl (FIELD_DECL
, get_identifier ("__va_stk"),
2335 f_reg
= build_decl (FIELD_DECL
, get_identifier ("__va_reg"),
2337 f_ndx
= build_decl (FIELD_DECL
, get_identifier ("__va_ndx"),
2340 DECL_FIELD_CONTEXT (f_stk
) = record
;
2341 DECL_FIELD_CONTEXT (f_reg
) = record
;
2342 DECL_FIELD_CONTEXT (f_ndx
) = record
;
2344 TREE_CHAIN (record
) = type_decl
;
2345 TYPE_NAME (record
) = type_decl
;
2346 TYPE_FIELDS (record
) = f_stk
;
2347 TREE_CHAIN (f_stk
) = f_reg
;
2348 TREE_CHAIN (f_reg
) = f_ndx
;
2350 layout_type (record
);
2355 /* Save the incoming argument registers on the stack. Returns the
2356 address of the saved registers. */
2359 xtensa_builtin_saveregs (void)
2362 int arg_words
= current_function_arg_words
;
2363 int gp_left
= MAX_ARGS_IN_REGISTERS
- arg_words
;
2369 /* Allocate the general-purpose register space. */
2370 gp_regs
= assign_stack_local
2371 (BLKmode
, MAX_ARGS_IN_REGISTERS
* UNITS_PER_WORD
, -1);
2372 set_mem_alias_set (gp_regs
, get_varargs_alias_set ());
2374 /* Now store the incoming registers. */
2375 dest
= change_address (gp_regs
, SImode
,
2376 plus_constant (XEXP (gp_regs
, 0),
2377 arg_words
* UNITS_PER_WORD
));
2379 /* Note: Don't use move_block_from_reg() here because the incoming
2380 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2381 Instead, call gen_raw_REG() directly so that we get a distinct
2382 instance of (REG:SI 7). */
2383 for (i
= 0; i
< gp_left
; i
++)
2385 emit_move_insn (operand_subword (dest
, i
, 1, BLKmode
),
2386 gen_raw_REG (SImode
, GP_ARG_FIRST
+ arg_words
+ i
));
2389 return XEXP (gp_regs
, 0);
2393 /* Implement `va_start' for varargs and stdarg. We look at the
2394 current function to fill in an initial va_list. */
2397 xtensa_va_start (tree valist
, rtx nextarg ATTRIBUTE_UNUSED
)
2405 arg_words
= current_function_args_info
.arg_words
;
2407 f_stk
= TYPE_FIELDS (va_list_type_node
);
2408 f_reg
= TREE_CHAIN (f_stk
);
2409 f_ndx
= TREE_CHAIN (f_reg
);
2411 stk
= build (COMPONENT_REF
, TREE_TYPE (f_stk
), valist
, f_stk
);
2412 reg
= build (COMPONENT_REF
, TREE_TYPE (f_reg
), valist
, f_reg
);
2413 ndx
= build (COMPONENT_REF
, TREE_TYPE (f_ndx
), valist
, f_ndx
);
2415 /* Call __builtin_saveregs; save the result in __va_reg */
2416 current_function_arg_words
= arg_words
;
2417 u
= make_tree (ptr_type_node
, expand_builtin_saveregs ());
2418 t
= build (MODIFY_EXPR
, ptr_type_node
, reg
, u
);
2419 TREE_SIDE_EFFECTS (t
) = 1;
2420 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2422 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2423 u
= make_tree (ptr_type_node
, virtual_incoming_args_rtx
);
2424 u
= fold (build (PLUS_EXPR
, ptr_type_node
, u
,
2425 build_int_2 (-MAX_ARGS_IN_REGISTERS
* UNITS_PER_WORD
, -1)));
2426 t
= build (MODIFY_EXPR
, ptr_type_node
, stk
, u
);
2427 TREE_SIDE_EFFECTS (t
) = 1;
2428 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2430 /* Set the __va_ndx member. */
2431 u
= build_int_2 (arg_words
* UNITS_PER_WORD
, 0);
2432 t
= build (MODIFY_EXPR
, integer_type_node
, ndx
, u
);
2433 TREE_SIDE_EFFECTS (t
) = 1;
2434 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2438 /* Implement `va_arg'. */
2441 xtensa_va_arg (tree valist
, tree type
)
2446 tree tmp
, addr_tree
, type_size
;
2447 rtx array
, orig_ndx
, r
, addr
, size
, va_size
;
2448 rtx lab_false
, lab_over
, lab_false2
;
2450 f_stk
= TYPE_FIELDS (va_list_type_node
);
2451 f_reg
= TREE_CHAIN (f_stk
);
2452 f_ndx
= TREE_CHAIN (f_reg
);
2454 stk
= build (COMPONENT_REF
, TREE_TYPE (f_stk
), valist
, f_stk
);
2455 reg
= build (COMPONENT_REF
, TREE_TYPE (f_reg
), valist
, f_reg
);
2456 ndx
= build (COMPONENT_REF
, TREE_TYPE (f_ndx
), valist
, f_ndx
);
2458 type_size
= TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type
));
2460 va_size
= gen_reg_rtx (SImode
);
2461 tmp
= fold (build (MULT_EXPR
, sizetype
,
2462 fold (build (TRUNC_DIV_EXPR
, sizetype
,
2463 fold (build (PLUS_EXPR
, sizetype
,
2465 size_int (UNITS_PER_WORD
- 1))),
2466 size_int (UNITS_PER_WORD
))),
2467 size_int (UNITS_PER_WORD
)));
2468 r
= expand_expr (tmp
, va_size
, SImode
, EXPAND_NORMAL
);
2470 emit_move_insn (va_size
, r
);
2473 /* First align __va_ndx to a double word boundary if necessary for this arg:
2475 if (__alignof__ (TYPE) > 4)
2476 (AP).__va_ndx = (((AP).__va_ndx + 7) & -8); */
2478 if (TYPE_ALIGN (type
) > BITS_PER_WORD
)
2480 tmp
= build (PLUS_EXPR
, integer_type_node
, ndx
,
2481 build_int_2 ((2 * UNITS_PER_WORD
) - 1, 0));
2482 tmp
= build (BIT_AND_EXPR
, integer_type_node
, tmp
,
2483 build_int_2 (-2 * UNITS_PER_WORD
, -1));
2484 tmp
= build (MODIFY_EXPR
, integer_type_node
, ndx
, tmp
);
2485 TREE_SIDE_EFFECTS (tmp
) = 1;
2486 expand_expr (tmp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2490 /* Increment __va_ndx to point past the argument:
2492 orig_ndx = (AP).__va_ndx;
2493 (AP).__va_ndx += __va_size (TYPE); */
2495 orig_ndx
= gen_reg_rtx (SImode
);
2496 r
= expand_expr (ndx
, orig_ndx
, SImode
, EXPAND_NORMAL
);
2498 emit_move_insn (orig_ndx
, r
);
2500 tmp
= build (PLUS_EXPR
, integer_type_node
, ndx
,
2501 make_tree (intSI_type_node
, va_size
));
2502 tmp
= build (MODIFY_EXPR
, integer_type_node
, ndx
, tmp
);
2503 TREE_SIDE_EFFECTS (tmp
) = 1;
2504 expand_expr (tmp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2507 /* Check if the argument is in registers:
2509 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2510 && !MUST_PASS_IN_STACK (type))
2511 __array = (AP).__va_reg; */
2513 array
= gen_reg_rtx (Pmode
);
2515 lab_over
= NULL_RTX
;
2516 if (!MUST_PASS_IN_STACK (VOIDmode
, type
))
2518 lab_false
= gen_label_rtx ();
2519 lab_over
= gen_label_rtx ();
2521 emit_cmp_and_jump_insns (expand_expr (ndx
, NULL_RTX
, SImode
,
2523 GEN_INT (MAX_ARGS_IN_REGISTERS
2525 GT
, const1_rtx
, SImode
, 0, lab_false
);
2527 r
= expand_expr (reg
, array
, Pmode
, EXPAND_NORMAL
);
2529 emit_move_insn (array
, r
);
2531 emit_jump_insn (gen_jump (lab_over
));
2533 emit_label (lab_false
);
2536 /* ...otherwise, the argument is on the stack (never split between
2537 registers and the stack -- change __va_ndx if necessary):
2541 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2542 (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2543 __array = (AP).__va_stk;
2546 lab_false2
= gen_label_rtx ();
2547 emit_cmp_and_jump_insns (orig_ndx
,
2548 GEN_INT (MAX_ARGS_IN_REGISTERS
* UNITS_PER_WORD
),
2549 GE
, const1_rtx
, SImode
, 0, lab_false2
);
2551 tmp
= build (PLUS_EXPR
, sizetype
, make_tree (intSI_type_node
, va_size
),
2552 build_int_2 (MAX_ARGS_IN_REGISTERS
* UNITS_PER_WORD
, 0));
2553 tmp
= build (MODIFY_EXPR
, integer_type_node
, ndx
, tmp
);
2554 TREE_SIDE_EFFECTS (tmp
) = 1;
2555 expand_expr (tmp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2557 emit_label (lab_false2
);
2559 r
= expand_expr (stk
, array
, Pmode
, EXPAND_NORMAL
);
2561 emit_move_insn (array
, r
);
2563 if (lab_over
!= NULL_RTX
)
2564 emit_label (lab_over
);
2567 /* Given the base array pointer (__array) and index to the subsequent
2568 argument (__va_ndx), find the address:
2570 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2574 The results are endian-dependent because values smaller than one word
2575 are aligned differently. */
2577 size
= gen_reg_rtx (SImode
);
2578 emit_move_insn (size
, va_size
);
2580 if (BYTES_BIG_ENDIAN
)
2582 rtx lab_use_va_size
= gen_label_rtx ();
2584 emit_cmp_and_jump_insns (expand_expr (type_size
, NULL_RTX
, SImode
,
2586 GEN_INT (PARM_BOUNDARY
/ BITS_PER_UNIT
),
2587 GE
, const1_rtx
, SImode
, 0, lab_use_va_size
);
2589 r
= expand_expr (type_size
, size
, SImode
, EXPAND_NORMAL
);
2591 emit_move_insn (size
, r
);
2593 emit_label (lab_use_va_size
);
2596 addr_tree
= build (PLUS_EXPR
, ptr_type_node
,
2597 make_tree (ptr_type_node
, array
),
2599 addr_tree
= build (MINUS_EXPR
, ptr_type_node
, addr_tree
,
2600 make_tree (intSI_type_node
, size
));
2601 addr
= expand_expr (addr_tree
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
2602 addr
= copy_to_reg (addr
);
2608 xtensa_preferred_reload_class (rtx x
, enum reg_class
class, int isoutput
)
2610 if (!isoutput
&& CONSTANT_P (x
) && GET_CODE (x
) == CONST_DOUBLE
)
2613 /* Don't use the stack pointer or hard frame pointer for reloads!
2614 The hard frame pointer would normally be OK except that it may
2615 briefly hold an incoming argument in the prologue, and reload
2616 won't know that it is live because the hard frame pointer is
2617 treated specially. */
2619 if (class == AR_REGS
|| class == GR_REGS
)
2627 xtensa_secondary_reload_class (enum reg_class
class,
2628 enum machine_mode mode ATTRIBUTE_UNUSED
,
2629 rtx x
, int isoutput
)
2633 if (GET_CODE (x
) == SIGN_EXTEND
)
2635 regno
= xt_true_regnum (x
);
2639 if (class == FP_REGS
&& constantpool_mem_p (x
))
2643 if (ACC_REG_P (regno
))
2644 return ((class == GR_REGS
|| class == RL_REGS
) ? NO_REGS
: RL_REGS
);
2645 if (class == ACC_REG
)
2646 return (GP_REG_P (regno
) ? NO_REGS
: RL_REGS
);
2653 order_regs_for_local_alloc (void)
2655 if (!leaf_function_p ())
2657 memcpy (reg_alloc_order
, reg_nonleaf_alloc_order
,
2658 FIRST_PSEUDO_REGISTER
* sizeof (int));
2662 int i
, num_arg_regs
;
2665 /* Use the AR registers in increasing order (skipping a0 and a1)
2666 but save the incoming argument registers for a last resort. */
2667 num_arg_regs
= current_function_args_info
.arg_words
;
2668 if (num_arg_regs
> MAX_ARGS_IN_REGISTERS
)
2669 num_arg_regs
= MAX_ARGS_IN_REGISTERS
;
2670 for (i
= GP_ARG_FIRST
; i
< 16 - num_arg_regs
; i
++)
2671 reg_alloc_order
[nxt
++] = i
+ num_arg_regs
;
2672 for (i
= 0; i
< num_arg_regs
; i
++)
2673 reg_alloc_order
[nxt
++] = GP_ARG_FIRST
+ i
;
2675 /* List the coprocessor registers in order. */
2676 for (i
= 0; i
< BR_REG_NUM
; i
++)
2677 reg_alloc_order
[nxt
++] = BR_REG_FIRST
+ i
;
2679 /* List the FP registers in order for now. */
2680 for (i
= 0; i
< 16; i
++)
2681 reg_alloc_order
[nxt
++] = FP_REG_FIRST
+ i
;
2683 /* GCC requires that we list *all* the registers.... */
2684 reg_alloc_order
[nxt
++] = 0; /* a0 = return address */
2685 reg_alloc_order
[nxt
++] = 1; /* a1 = stack pointer */
2686 reg_alloc_order
[nxt
++] = 16; /* pseudo frame pointer */
2687 reg_alloc_order
[nxt
++] = 17; /* pseudo arg pointer */
2689 reg_alloc_order
[nxt
++] = ACC_REG_FIRST
; /* MAC16 accumulator */
2694 /* A customized version of reg_overlap_mentioned_p that only looks for
2695 references to a7 (as opposed to hard_frame_pointer_rtx). */
2698 a7_overlap_mentioned_p (rtx x
)
2701 unsigned int x_regno
;
2704 if (GET_CODE (x
) == REG
)
2706 x_regno
= REGNO (x
);
2707 return (x
!= hard_frame_pointer_rtx
2708 && x_regno
< A7_REG
+ 1
2709 && x_regno
+ HARD_REGNO_NREGS (A7_REG
, GET_MODE (x
)) > A7_REG
);
2712 if (GET_CODE (x
) == SUBREG
2713 && GET_CODE (SUBREG_REG (x
)) == REG
2714 && REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
)
2716 x_regno
= subreg_regno (x
);
2717 return (SUBREG_REG (x
) != hard_frame_pointer_rtx
2718 && x_regno
< A7_REG
+ 1
2719 && x_regno
+ HARD_REGNO_NREGS (A7_REG
, GET_MODE (x
)) > A7_REG
);
2722 /* X does not match, so try its subexpressions. */
2723 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
2724 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
2728 if (a7_overlap_mentioned_p (XEXP (x
, i
)))
2731 else if (fmt
[i
] == 'E')
2733 for (j
= XVECLEN (x
, i
) - 1; j
>=0; j
--)
2734 if (a7_overlap_mentioned_p (XVECEXP (x
, i
, j
)))
2743 /* Some Xtensa targets support multiple bss sections. If the section
2744 name ends with ".bss", add SECTION_BSS to the flags. */
/* Target hook: compute section flags for DECL placed in section NAME.
   Starts from the generic default flags, then marks any "*.bss" section
   as SECTION_BSS (emitted as @nobits), but only when the decl is absent
   or is an uninitialized VAR_DECL; otherwise a warning is issued.
   NOTE(review): the local 'suffix' declaration, the return statement, and
   the continuation of the warning string are elided in this extraction —
   confirm against the complete source.  */
2747 xtensa_multibss_section_type_flags (tree decl
, const char *name
, int reloc
)
2749 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
/* Find the last '.'-separated component of the section name.  */
2752 suffix
= strrchr (name
, '.');
2753 if (suffix
&& strcmp (suffix
, ".bss") == 0)
/* Only genuinely uninitialized variables may live in a bss-style
   (@nobits) section.  */
2755 if (!decl
|| (TREE_CODE (decl
) == VAR_DECL
2756 && DECL_INITIAL (decl
) == NULL_TREE
))
2757 flags
|= SECTION_BSS
; /* @nobits */
/* Initialized data in a ".bss"-named section: warn rather than
   silently emitting it as nobits.  */
2759 warning ("only uninitialized variables can be placed in a "
2767 /* The literal pool stays with the function. */
/* Target hook (TARGET_ASM_SELECT_RTX_SECTION): choose the section for a
   constant-pool entry.  All three parameters are unused; the pool is
   simply placed in the current function's own section so literals stay
   adjacent to the code that loads them (Xtensa L32R uses PC-relative
   addressing with limited range).
   NOTE(review): the 'static void' return-type line is elided in this
   extraction — confirm against the complete source.  */
2770 xtensa_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED
,
2771 rtx x ATTRIBUTE_UNUSED
,
2772 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
2774 function_section (current_function_decl
);
2778 /* Compute a (partial) cost for rtx X. Return true if the complete
2779 cost has been computed, and false if subexpressions should be
2780 scanned. In either case, *TOTAL contains the cost result. */
/* Target hook (TARGET_RTX_COSTS).  Costs are expressed via COSTS_N_INSNS;
   50 is used throughout as a "very expensive / needs a libcall" sentinel.
   NOTE(review): this extraction has elided the switch statement, all case
   labels, break/return lines, and several declarations — the comments
   below reconstruct the apparent intent from the visible predicates and
   should be confirmed against the complete source.  */
2783 xtensa_rtx_costs (rtx x
, int code
, int outer_code
, int *total
)
/* CONST_INT handling: the cost of an integer constant depends on which
   immediate field of the outer operation it can fit (presumably keyed
   off outer_code — elided here; TODO confirm).  */
2791 if (xtensa_simm12b (INTVAL (x
)))
2798 if (xtensa_simm8 (INTVAL (x
))
2799 || xtensa_simm8x256 (INTVAL (x
)))
2806 if (xtensa_mask_immediate (INTVAL (x
)))
2813 if ((INTVAL (x
) == 0) || xtensa_b4const (INTVAL (x
)))
2824 /* No way to tell if X is the 2nd operand so be conservative. */
2827 if (xtensa_simm12b (INTVAL (x
)))
/* With the CONST16 option, any constant can be materialized in two
   instructions.  */
2829 else if (TARGET_CONST16
)
2830 *total
= COSTS_N_INSNS (2);
2839 *total
= COSTS_N_INSNS (2);
2846 *total
= COSTS_N_INSNS (4);
/* Memory access: double-word accesses need two loads/stores; an
   address that is not directly encodable doubles the cost again.  */
2854 (GET_MODE_SIZE (GET_MODE (x
)) > UNITS_PER_WORD
) ? 2 : 1;
2856 if (memory_address_p (GET_MODE (x
), XEXP ((x
), 0)))
2857 *total
= COSTS_N_INSNS (num_words
);
2859 *total
= COSTS_N_INSNS (2*num_words
);
/* Count-leading-zeros: cheap with the NSA option, libcall otherwise.  */
2864 *total
= COSTS_N_INSNS (TARGET_NSA
? 5 : 50);
2868 *total
= COSTS_N_INSNS ((GET_MODE (x
) == DImode
) ? 3 : 2);
/* Simple logical/arithmetic ops: DImode takes two word operations.  */
2874 if (GET_MODE (x
) == DImode
)
2875 *total
= COSTS_N_INSNS (2);
2877 *total
= COSTS_N_INSNS (1);
/* Shifts: no double-word shifter, so DImode is treated as a libcall.  */
2883 if (GET_MODE (x
) == DImode
)
2884 *total
= COSTS_N_INSNS (50);
2886 *total
= COSTS_N_INSNS (1);
/* Absolute value: SFmode is cheap only with hardware float.  */
2891 enum machine_mode xmode
= GET_MODE (x
);
2892 if (xmode
== SFmode
)
2893 *total
= COSTS_N_INSNS (TARGET_HARD_FLOAT
? 1 : 50);
2894 else if (xmode
== DFmode
)
2895 *total
= COSTS_N_INSNS (50);
2897 *total
= COSTS_N_INSNS (4);
/* Addition/subtraction: hardware float makes SFmode single-cycle;
   DFmode and (here) DImode fall back to expensive sequences.  */
2904 enum machine_mode xmode
= GET_MODE (x
);
2905 if (xmode
== SFmode
)
2906 *total
= COSTS_N_INSNS (TARGET_HARD_FLOAT
? 1 : 50);
2907 else if (xmode
== DFmode
|| xmode
== DImode
)
2908 *total
= COSTS_N_INSNS (50);
2910 *total
= COSTS_N_INSNS (1);
2915 *total
= COSTS_N_INSNS ((GET_MODE (x
) == DImode
) ? 4 : 2);
/* Multiplication: cost depends on which multiplier option is
   configured (MUL32 / MAC16 / MUL16), otherwise a libcall.  */
2920 enum machine_mode xmode
= GET_MODE (x
);
2921 if (xmode
== SFmode
)
2922 *total
= COSTS_N_INSNS (TARGET_HARD_FLOAT
? 4 : 50);
2923 else if (xmode
== DFmode
|| xmode
== DImode
)
2924 *total
= COSTS_N_INSNS (50);
2925 else if (TARGET_MUL32
)
2926 *total
= COSTS_N_INSNS (4);
2927 else if (TARGET_MAC16
)
2928 *total
= COSTS_N_INSNS (16);
2929 else if (TARGET_MUL16
)
2930 *total
= COSTS_N_INSNS (12);
2932 *total
= COSTS_N_INSNS (50);
/* Division: SFmode is moderate with a hardware FP divider; DFmode is
   always a libcall.  (Fall-through structure elided — TODO confirm.)  */
2939 enum machine_mode xmode
= GET_MODE (x
);
2940 if (xmode
== SFmode
)
2942 *total
= COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV
? 8 : 50);
2945 else if (xmode
== DFmode
)
2947 *total
= COSTS_N_INSNS (50);
/* Integer division/modulo: 32-bit hardware divide when available.  */
2956 enum machine_mode xmode
= GET_MODE (x
);
2957 if (xmode
== DImode
)
2958 *total
= COSTS_N_INSNS (50);
2959 else if (TARGET_DIV32
)
2960 *total
= COSTS_N_INSNS (32);
2962 *total
= COSTS_N_INSNS (50);
/* Square root: only SFmode has a hardware option.  */
2967 if (GET_MODE (x
) == SFmode
)
2968 *total
= COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT
? 8 : 50);
2970 *total
= COSTS_N_INSNS (50);
/* Min/max: single instruction with the MINMAX option.  */
2977 *total
= COSTS_N_INSNS (TARGET_MINMAX
? 1 : 50);
/* Sign extension: one SEXT instruction, else shift-left/shift-right.  */
2982 *total
= COSTS_N_INSNS (TARGET_SEXT
? 1 : 2);
2987 *total
= COSTS_N_INSNS (1);
2995 #include "gt-xtensa.h"