1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "insn-attr.h"
35 #include "insn-codes.h"
36 #include "recog.h"
37 #include "output.h"
38 #include "tree.h"
39 #include "expr.h"
40 #include "flags.h"
41 #include "reload.h"
42 #include "tm_p.h"
43 #include "function.h"
44 #include "toplev.h"
45 #include "optabs.h"
46 #include "libfuncs.h"
47 #include "ggc.h"
48 #include "target.h"
49 #include "target-def.h"
50 #include "langhooks.h"
52 /* Enumeration for all of the relational tests, so that we can build
53 arrays indexed by the test type, and not worry about the order
54 of EQ, NE, etc. */
56 enum internal_test
58 ITEST_EQ,
59 ITEST_NE,
60 ITEST_GT,
61 ITEST_GE,
62 ITEST_LT,
63 ITEST_LE,
64 ITEST_GTU,
65 ITEST_GEU,
66 ITEST_LTU,
67 ITEST_LEU,
68 ITEST_MAX
71 /* Cached operands, and operator to compare for use in set/branch on
72 condition codes. */
73 rtx branch_cmp[2];
75 /* what type of branch to use */
76 enum cmp_type branch_type;
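/* A note on usage (not in the original source): these two variables are
   expected to be filled in by the comparison expander patterns (the cmp*
   patterns in the machine description) and are then read back by
   xtensa_expand_conditional_branch, gen_conditional_move and
   xtensa_expand_scc below, since the comparison and the branch are
   expanded from separate named patterns.  */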
78 /* Array giving truth value on whether or not a given hard register
79 can support a given mode. */
80 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
82 /* Current frame size calculated by compute_frame_size. */
83 unsigned xtensa_current_frame_size;
85 /* Tables of ld/st opcode names for block moves */
86 const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
87 const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
88 #define LARGEST_MOVE_RATIO 15
90 /* Define the structure for the machine field in struct function. */
91 struct machine_function GTY(())
93 int accesses_prev_frame;
94 bool need_a7_copy;
95 bool vararg_a7;
96 rtx set_frame_ptr_insn;
99 /* Vector, indexed by hard register number, which contains 1 for a
100 register that is allowable in a candidate for leaf function
101 treatment. */
103 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
105 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
106 1, 1, 1,
107 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
111 /* Map hard register number to register class */
112 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
114 RL_REGS, SP_REG, RL_REGS, RL_REGS,
115 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
116 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
117 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
118 AR_REGS, AR_REGS, BR_REGS,
119 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
120 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
121 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
122 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
123 ACC_REG,
126 /* Map register constraint character to register class. */
127 enum reg_class xtensa_char_to_class[256] =
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
189 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
190 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
191 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
192 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
195 static int b4const_or_zero (int);
196 static enum internal_test map_test_to_internal_test (enum rtx_code);
197 static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
198 static rtx gen_float_relational (enum rtx_code, rtx, rtx);
199 static rtx gen_conditional_move (rtx);
200 static rtx fixup_subreg_mem (rtx);
201 static enum machine_mode xtensa_find_mode_for_size (unsigned);
202 static struct machine_function * xtensa_init_machine_status (void);
203 static bool xtensa_return_in_msb (tree);
204 static void printx (FILE *, signed int);
205 static void xtensa_function_epilogue (FILE *, HOST_WIDE_INT);
206 static rtx xtensa_builtin_saveregs (void);
207 static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
208 int) ATTRIBUTE_UNUSED;
209 static void xtensa_select_rtx_section (enum machine_mode, rtx,
210 unsigned HOST_WIDE_INT);
211 static bool xtensa_rtx_costs (rtx, int, int, int *);
212 static tree xtensa_build_builtin_va_list (void);
213 static bool xtensa_return_in_memory (tree, tree);
215 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
216 REG_ALLOC_ORDER;
219 /* This macro generates the assembly code for function exit,
220 on machines that need it. If FUNCTION_EPILOGUE is not defined
221 then individual return instructions are generated for each
222 return statement. Args are same as for FUNCTION_PROLOGUE. */
224 #undef TARGET_ASM_FUNCTION_EPILOGUE
225 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
227 /* These hooks specify assembly directives for creating certain kinds
228 of integer object. */
230 #undef TARGET_ASM_ALIGNED_SI_OP
231 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
233 #undef TARGET_ASM_SELECT_RTX_SECTION
234 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
236 #undef TARGET_RTX_COSTS
237 #define TARGET_RTX_COSTS xtensa_rtx_costs
238 #undef TARGET_ADDRESS_COST
239 #define TARGET_ADDRESS_COST hook_int_rtx_0
241 #undef TARGET_BUILD_BUILTIN_VA_LIST
242 #define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list
244 #undef TARGET_PROMOTE_FUNCTION_ARGS
245 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
246 #undef TARGET_PROMOTE_FUNCTION_RETURN
247 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
248 #undef TARGET_PROMOTE_PROTOTYPES
249 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
251 #undef TARGET_RETURN_IN_MEMORY
252 #define TARGET_RETURN_IN_MEMORY xtensa_return_in_memory
253 #undef TARGET_SPLIT_COMPLEX_ARG
254 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
256 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
257 #define TARGET_EXPAND_BUILTIN_SAVEREGS xtensa_builtin_saveregs
259 #undef TARGET_RETURN_IN_MSB
260 #define TARGET_RETURN_IN_MSB xtensa_return_in_msb
262 struct gcc_target targetm = TARGET_INITIALIZER;
266 /* Functions to test Xtensa immediate operand validity.  */
270 xtensa_b4constu (int v)
272 switch (v)
274 case 32768:
275 case 65536:
276 case 2:
277 case 3:
278 case 4:
279 case 5:
280 case 6:
281 case 7:
282 case 8:
283 case 10:
284 case 12:
285 case 16:
286 case 32:
287 case 64:
288 case 128:
289 case 256:
290 return 1;
292 return 0;
296 xtensa_simm8x256 (int v)
298 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
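/* Illustrative reading: the predicate accepts a signed 8-bit value scaled
   by 256 (the ADDMI-style immediate range, assuming that is its intended
   user), i.e. multiples of 256 from -32768 up to 32512.  */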
302 xtensa_ai4const (int v)
304 return (v == -1 || (v >= 1 && v <= 15));
308 xtensa_simm7 (int v)
310 return v >= -32 && v <= 95;
314 xtensa_b4const (int v)
316 switch (v)
318 case -1:
319 case 1:
320 case 2:
321 case 3:
322 case 4:
323 case 5:
324 case 6:
325 case 7:
326 case 8:
327 case 10:
328 case 12:
329 case 16:
330 case 32:
331 case 64:
332 case 128:
333 case 256:
334 return 1;
336 return 0;
340 xtensa_simm8 (int v)
342 return v >= -128 && v <= 127;
346 xtensa_tp7 (int v)
348 return (v >= 7 && v <= 22);
352 xtensa_lsi4x4 (int v)
354 return (v & 3) == 0 && (v >= 0 && v <= 60);
358 xtensa_simm12b (int v)
360 return v >= -2048 && v <= 2047;
364 xtensa_uimm8 (int v)
366 return v >= 0 && v <= 255;
370 xtensa_uimm8x2 (int v)
372 return (v & 1) == 0 && (v >= 0 && v <= 510);
376 xtensa_uimm8x4 (int v)
378 return (v & 3) == 0 && (v >= 0 && v <= 1020);
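/* The three uimm8 predicates above accept an unsigned 8-bit offset scaled
   by the access size: 0..255 for bytes, 0..510 in steps of 2 for halfwords,
   and 0..1020 in steps of 4 for words, presumably mirroring the offset
   fields of the corresponding load/store instructions.  */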
382 /* This is just like the standard true_regnum() function except that it
383 works even when reg_renumber is not initialized. */
386 xt_true_regnum (rtx x)
388 if (GET_CODE (x) == REG)
390 if (reg_renumber
391 && REGNO (x) >= FIRST_PSEUDO_REGISTER
392 && reg_renumber[REGNO (x)] >= 0)
393 return reg_renumber[REGNO (x)];
394 return REGNO (x);
396 if (GET_CODE (x) == SUBREG)
398 int base = xt_true_regnum (SUBREG_REG (x));
399 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
400 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
401 GET_MODE (SUBREG_REG (x)),
402 SUBREG_BYTE (x), GET_MODE (x));
404 return -1;
409 add_operand (rtx op, enum machine_mode mode)
411 if (GET_CODE (op) == CONST_INT)
412 return (xtensa_simm8 (INTVAL (op)) || xtensa_simm8x256 (INTVAL (op)));
414 return register_operand (op, mode);
419 arith_operand (rtx op, enum machine_mode mode)
421 if (GET_CODE (op) == CONST_INT)
422 return xtensa_simm8 (INTVAL (op));
424 return register_operand (op, mode);
429 nonimmed_operand (rtx op, enum machine_mode mode)
431 /* We cannot use the standard nonimmediate_operand() predicate because
432 it includes constant pool memory operands. */
434 if (memory_operand (op, mode))
435 return !constantpool_address_p (XEXP (op, 0));
437 return register_operand (op, mode);
442 mem_operand (rtx op, enum machine_mode mode)
444 /* We cannot use the standard memory_operand() predicate because
445 it includes constant pool memory operands. */
447 if (memory_operand (op, mode))
448 return !constantpool_address_p (XEXP (op, 0));
450 return FALSE;
455 xtensa_valid_move (enum machine_mode mode, rtx *operands)
457 /* Either the destination or source must be a register, and the
458 MAC16 accumulator doesn't count. */
460 if (register_operand (operands[0], mode))
462 int dst_regnum = xt_true_regnum (operands[0]);
464 /* The stack pointer can only be assigned with a MOVSP opcode. */
465 if (dst_regnum == STACK_POINTER_REGNUM)
466 return (mode == SImode
467 && register_operand (operands[1], mode)
468 && !ACC_REG_P (xt_true_regnum (operands[1])));
470 if (!ACC_REG_P (dst_regnum))
471 return true;
473 if (register_operand (operands[1], mode))
475 int src_regnum = xt_true_regnum (operands[1]);
476 if (!ACC_REG_P (src_regnum))
477 return true;
479 return FALSE;
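/* Roughly speaking, a move is accepted only when at least one operand is an
   ordinary address register: a memory-to-memory copy, a store of the MAC16
   accumulator straight to memory, or a write to the stack pointer from
   anything but another AR register is rejected here, so that
   xtensa_emit_move_sequence below will force the source into a temporary
   register first.  */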
484 mask_operand (rtx op, enum machine_mode mode)
486 if (GET_CODE (op) == CONST_INT)
487 return xtensa_mask_immediate (INTVAL (op));
489 return register_operand (op, mode);
494 extui_fldsz_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
496 return ((GET_CODE (op) == CONST_INT)
497 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
502 sext_operand (rtx op, enum machine_mode mode)
504 if (TARGET_SEXT)
505 return nonimmed_operand (op, mode);
506 return mem_operand (op, mode);
511 sext_fldsz_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
513 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
518 lsbitnum_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
520 if (GET_CODE (op) == CONST_INT)
522 return (BITS_BIG_ENDIAN
523 ? (INTVAL (op) == BITS_PER_WORD-1)
524 : (INTVAL (op) == 0));
526 return FALSE;
530 static int
531 b4const_or_zero (int v)
533 if (v == 0)
534 return TRUE;
535 return xtensa_b4const (v);
540 branch_operand (rtx op, enum machine_mode mode)
542 if (GET_CODE (op) == CONST_INT)
543 return b4const_or_zero (INTVAL (op));
545 return register_operand (op, mode);
550 ubranch_operand (rtx op, enum machine_mode mode)
552 if (GET_CODE (op) == CONST_INT)
553 return xtensa_b4constu (INTVAL (op));
555 return register_operand (op, mode);
560 call_insn_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
562 if ((GET_CODE (op) == REG)
563 && (op != arg_pointer_rtx)
564 && ((REGNO (op) < FRAME_POINTER_REGNUM)
565 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
566 return TRUE;
568 if (CONSTANT_ADDRESS_P (op))
570 /* Direct calls only allowed to static functions with PIC. */
571 if (flag_pic)
573 tree callee, callee_sec, caller_sec;
575 if (GET_CODE (op) != SYMBOL_REF || !SYMBOL_REF_LOCAL_P (op))
576 return FALSE;
578 /* Don't attempt a direct call if the callee is known to be in
579 a different section, since there's a good chance it will be
580 out of range. */
582 if (flag_function_sections
583 || DECL_ONE_ONLY (current_function_decl))
584 return FALSE;
585 caller_sec = DECL_SECTION_NAME (current_function_decl);
586 callee = SYMBOL_REF_DECL (op);
587 if (callee)
589 if (DECL_ONE_ONLY (callee))
590 return FALSE;
591 callee_sec = DECL_SECTION_NAME (callee);
592 if (((caller_sec == NULL_TREE) ^ (callee_sec == NULL_TREE))
593 || (caller_sec != NULL_TREE
594 && strcmp (TREE_STRING_POINTER (caller_sec),
595 TREE_STRING_POINTER (callee_sec)) != 0))
596 return FALSE;
598 else if (caller_sec != NULL_TREE)
599 return FALSE;
601 return TRUE;
604 return FALSE;
609 move_operand (rtx op, enum machine_mode mode)
611 if (register_operand (op, mode)
612 || memory_operand (op, mode))
613 return TRUE;
615 switch (mode)
617 case DFmode:
618 case SFmode:
619 return TARGET_CONST16 && CONSTANT_P (op);
621 case DImode:
622 case SImode:
623 if (TARGET_CONST16)
624 return CONSTANT_P (op);
625 /* Fall through. */
627 case HImode:
628 case QImode:
629 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
630 result in 0/1. */
631 if (GET_CODE (op) == CONSTANT_P_RTX)
632 return TRUE;
634 if (GET_CODE (op) == CONST_INT && xtensa_simm12b (INTVAL (op)))
635 return TRUE;
636 break;
638 default:
639 break;
642 return FALSE;
647 smalloffset_mem_p (rtx op)
649 if (GET_CODE (op) == MEM)
651 rtx addr = XEXP (op, 0);
652 if (GET_CODE (addr) == REG)
653 return REG_OK_FOR_BASE_P (addr);
654 if (GET_CODE (addr) == PLUS)
656 rtx offset = XEXP (addr, 0);
657 if (GET_CODE (offset) != CONST_INT)
658 offset = XEXP (addr, 1);
659 if (GET_CODE (offset) != CONST_INT)
660 return FALSE;
661 return xtensa_lsi4x4 (INTVAL (offset));
664 return FALSE;
669 constantpool_address_p (rtx addr)
671 rtx sym = addr;
673 if (GET_CODE (addr) == CONST)
675 rtx offset;
677 /* Only handle (PLUS (SYM, OFFSET)) form. */
678 addr = XEXP (addr, 0);
679 if (GET_CODE (addr) != PLUS)
680 return FALSE;
682 /* Make sure the address is word aligned. */
683 offset = XEXP (addr, 1);
684 if ((GET_CODE (offset) != CONST_INT)
685 || ((INTVAL (offset) & 3) != 0))
686 return FALSE;
688 sym = XEXP (addr, 0);
691 if ((GET_CODE (sym) == SYMBOL_REF)
692 && CONSTANT_POOL_ADDRESS_P (sym))
693 return TRUE;
694 return FALSE;
699 constantpool_mem_p (rtx op)
701 if (GET_CODE (op) == MEM)
702 return constantpool_address_p (XEXP (op, 0));
703 return FALSE;
707 /* Accept the floating point constant 1 in the appropriate mode. */
710 const_float_1_operand (rtx op, enum machine_mode mode)
712 REAL_VALUE_TYPE d;
713 static REAL_VALUE_TYPE onedf;
714 static REAL_VALUE_TYPE onesf;
715 static int one_initialized;
717 if ((GET_CODE (op) != CONST_DOUBLE)
718 || (mode != GET_MODE (op))
719 || (mode != DFmode && mode != SFmode))
720 return FALSE;
722 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
724 if (! one_initialized)
726 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
727 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
728 one_initialized = TRUE;
731 if (mode == DFmode)
732 return REAL_VALUES_EQUAL (d, onedf);
733 else
734 return REAL_VALUES_EQUAL (d, onesf);
739 fpmem_offset_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
741 if (GET_CODE (op) == CONST_INT)
742 return xtensa_mem_offset (INTVAL (op), SFmode);
743 return 0;
747 void
748 xtensa_extend_reg (rtx dst, rtx src)
750 rtx temp = gen_reg_rtx (SImode);
751 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
753 /* Generate paradoxical subregs as needed so that the modes match. */
754 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
755 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
757 emit_insn (gen_ashlsi3 (temp, src, shift));
758 emit_insn (gen_ashrsi3 (dst, temp, shift));
763 branch_operator (rtx x, enum machine_mode mode)
765 if (GET_MODE (x) != mode)
766 return FALSE;
768 switch (GET_CODE (x))
770 case EQ:
771 case NE:
772 case LT:
773 case GE:
774 return TRUE;
775 default:
776 break;
778 return FALSE;
783 ubranch_operator (rtx x, enum machine_mode mode)
785 if (GET_MODE (x) != mode)
786 return FALSE;
788 switch (GET_CODE (x))
790 case LTU:
791 case GEU:
792 return TRUE;
793 default:
794 break;
796 return FALSE;
801 boolean_operator (rtx x, enum machine_mode mode)
803 if (GET_MODE (x) != mode)
804 return FALSE;
806 switch (GET_CODE (x))
808 case EQ:
809 case NE:
810 return TRUE;
811 default:
812 break;
814 return FALSE;
819 xtensa_mask_immediate (int v)
821 #define MAX_MASK_SIZE 16
822 int mask_size;
824 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
826 if ((v & 1) == 0)
827 return FALSE;
828 v = v >> 1;
829 if (v == 0)
830 return TRUE;
833 return FALSE;
838 xtensa_mem_offset (unsigned v, enum machine_mode mode)
840 switch (mode)
842 case BLKmode:
843 /* Handle the worst case for block moves. See xtensa_expand_block_move
844 where we emit an optimized block move operation if the block can be
845 moved in < "move_ratio" pieces. The worst case is when the block is
846 aligned but has a size of (3 mod 4) (does this happen?) so that the
847 last piece requires a byte load/store. */
848 return (xtensa_uimm8 (v)
849 && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
851 case QImode:
852 return xtensa_uimm8 (v);
854 case HImode:
855 return xtensa_uimm8x2 (v);
857 case DFmode:
858 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
860 default:
861 break;
864 return xtensa_uimm8x4 (v);
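/* Worked example: for an SImode access (the default case), an offset of
   1024 is rejected because it exceeds the scaled 8-bit field (maximum
   1020), while 1020 is accepted; for DFmode both words must be addressable,
   so the largest accepted offset is 1016.  */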
868 /* Make normal rtx_code into something we can index from an array. */
870 static enum internal_test
871 map_test_to_internal_test (enum rtx_code test_code)
873 enum internal_test test = ITEST_MAX;
875 switch (test_code)
877 default: break;
878 case EQ: test = ITEST_EQ; break;
879 case NE: test = ITEST_NE; break;
880 case GT: test = ITEST_GT; break;
881 case GE: test = ITEST_GE; break;
882 case LT: test = ITEST_LT; break;
883 case LE: test = ITEST_LE; break;
884 case GTU: test = ITEST_GTU; break;
885 case GEU: test = ITEST_GEU; break;
886 case LTU: test = ITEST_LTU; break;
887 case LEU: test = ITEST_LEU; break;
890 return test;
894 /* Generate the code to compare two integer values. The return value is
895 the comparison expression. */
897 static rtx
898 gen_int_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
899 rtx cmp0, /* first operand to compare */
900 rtx cmp1, /* second operand to compare */
901 int *p_invert /* whether branch needs to reverse test */)
903 struct cmp_info
905 enum rtx_code test_code; /* test code to use in insn */
906 int (*const_range_p) (int); /* predicate function to check range */
907 int const_add; /* constant to add (convert LE -> LT) */
908 int reverse_regs; /* reverse registers in test */
909 int invert_const; /* != 0 if invert value if cmp1 is constant */
910 int invert_reg; /* != 0 if invert value if cmp1 is register */
911 int unsignedp; /* != 0 for unsigned comparisons. */
914 static struct cmp_info info[ (int)ITEST_MAX ] = {
916 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
917 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
919 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
920 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
921 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
922 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
924 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
925 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
926 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
927 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
930 enum internal_test test;
931 enum machine_mode mode;
932 struct cmp_info *p_info;
934 test = map_test_to_internal_test (test_code);
935 if (test == ITEST_MAX)
936 abort ();
938 p_info = &info[ (int)test ];
940 mode = GET_MODE (cmp0);
941 if (mode == VOIDmode)
942 mode = GET_MODE (cmp1);
944 /* Make sure we can handle any constants given to us. */
945 if (GET_CODE (cmp1) == CONST_INT)
947 HOST_WIDE_INT value = INTVAL (cmp1);
948 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
950 /* If the immediate overflows or does not fit in the immediate field,
951 spill it to a register.  */
953 if ((p_info->unsignedp ?
954 (uvalue + p_info->const_add > uvalue) :
955 (value + p_info->const_add > value)) != (p_info->const_add > 0))
957 cmp1 = force_reg (mode, cmp1);
959 else if (!(p_info->const_range_p) (value + p_info->const_add))
961 cmp1 = force_reg (mode, cmp1);
964 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
966 cmp1 = force_reg (mode, cmp1);
969 /* See if we need to invert the result. */
970 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
971 ? p_info->invert_const
972 : p_info->invert_reg);
974 /* Comparison to a constant may involve adding 1 to change an LT into an LE test.
975 Comparison between two registers may involve switching the operands.  */
976 if (GET_CODE (cmp1) == CONST_INT)
978 if (p_info->const_add != 0)
979 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
982 else if (p_info->reverse_regs)
984 rtx temp = cmp0;
985 cmp0 = cmp1;
986 cmp1 = temp;
989 return gen_rtx_fmt_ee (p_info->test_code, VOIDmode, cmp0, cmp1);
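/* Worked example of the table above: for "x > 10" (GT), the entry selects
   LT with const_add = 1 and invert_const = 1, so the constant case is
   emitted as (lt x 11) with *p_invert set, i.e. the caller branches on
   !(x < 11), which is x > 10.  With a register operand, reverse_regs swaps
   the operands instead, giving (lt y x) for "x > y" with no inversion.  */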
993 /* Generate the code to compare two float values. The return value is
994 the comparison expression. */
996 static rtx
997 gen_float_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
998 rtx cmp0, /* first operand to compare */
999 rtx cmp1 /* second operand to compare */)
1001 rtx (*gen_fn) (rtx, rtx, rtx);
1002 rtx brtmp;
1003 int reverse_regs, invert;
1005 switch (test_code)
1007 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1008 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1009 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1010 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1011 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1012 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1013 default:
1014 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
1015 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
1018 if (reverse_regs)
1020 rtx temp = cmp0;
1021 cmp0 = cmp1;
1022 cmp1 = temp;
1025 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1026 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1028 return gen_rtx_fmt_ee (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
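/* The generated sequence computes the condition into the boolean register
   FPCC using one of the seq_sf/slt_sf/sle_sf patterns and returns an EQ or
   NE test of that register against zero, which the caller then uses in the
   branch or conditional move (EQ when the sense must be inverted, as for
   NE comparisons).  */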
1032 void
1033 xtensa_expand_conditional_branch (rtx *operands, enum rtx_code test_code)
1035 enum cmp_type type = branch_type;
1036 rtx cmp0 = branch_cmp[0];
1037 rtx cmp1 = branch_cmp[1];
1038 rtx cmp;
1039 int invert;
1040 rtx label1, label2;
1042 switch (type)
1044 case CMP_DF:
1045 default:
1046 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
1048 case CMP_SI:
1049 invert = FALSE;
1050 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1051 break;
1053 case CMP_SF:
1054 if (!TARGET_HARD_FLOAT)
1055 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
1056 invert = FALSE;
1057 cmp = gen_float_relational (test_code, cmp0, cmp1);
1058 break;
1061 /* Generate the branch. */
1063 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1064 label2 = pc_rtx;
1066 if (invert)
1068 label2 = label1;
1069 label1 = pc_rtx;
1072 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1073 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
1074 label1,
1075 label2)));
1079 static rtx
1080 gen_conditional_move (rtx cmp)
1082 enum rtx_code code = GET_CODE (cmp);
1083 rtx op0 = branch_cmp[0];
1084 rtx op1 = branch_cmp[1];
1086 if (branch_type == CMP_SI)
1088 /* Jump optimization calls get_condition() which canonicalizes
1089 comparisons like (GE x <const>) to (GT x <const-1>).
1090 Transform those comparisons back to GE, since that is the
1091 comparison supported in Xtensa. We shouldn't have to
1092 transform <LE x const> comparisons, because neither
1093 xtensa_expand_conditional_branch() nor get_condition() will
1094 produce them. */
1096 if ((code == GT) && (op1 == constm1_rtx))
1098 code = GE;
1099 op1 = const0_rtx;
1101 cmp = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
1103 if (boolean_operator (cmp, VOIDmode))
1105 /* Swap the operands to make const0 second. */
1106 if (op0 == const0_rtx)
1108 op0 = op1;
1109 op1 = const0_rtx;
1112 /* If not comparing against zero, emit a comparison (subtract). */
1113 if (op1 != const0_rtx)
1115 op0 = expand_binop (SImode, sub_optab, op0, op1,
1116 0, 0, OPTAB_LIB_WIDEN);
1117 op1 = const0_rtx;
1120 else if (branch_operator (cmp, VOIDmode))
1122 /* Swap the operands to make const0 second. */
1123 if (op0 == const0_rtx)
1125 op0 = op1;
1126 op1 = const0_rtx;
1128 switch (code)
1130 case LT: code = GE; break;
1131 case GE: code = LT; break;
1132 default: abort ();
1136 if (op1 != const0_rtx)
1137 return 0;
1139 else
1140 return 0;
1142 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
1145 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1146 return gen_float_relational (code, op0, op1);
1148 return 0;
1153 xtensa_expand_conditional_move (rtx *operands, int isflt)
1155 rtx cmp;
1156 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
1158 if (!(cmp = gen_conditional_move (operands[1])))
1159 return 0;
1161 if (isflt)
1162 gen_fn = (branch_type == CMP_SI
1163 ? gen_movsfcc_internal0
1164 : gen_movsfcc_internal1);
1165 else
1166 gen_fn = (branch_type == CMP_SI
1167 ? gen_movsicc_internal0
1168 : gen_movsicc_internal1);
1170 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1171 operands[2], operands[3], cmp));
1172 return 1;
1177 xtensa_expand_scc (rtx *operands)
1179 rtx dest = operands[0];
1180 rtx cmp = operands[1];
1181 rtx one_tmp, zero_tmp;
1182 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
1184 if (!(cmp = gen_conditional_move (cmp)))
1185 return 0;
1187 one_tmp = gen_reg_rtx (SImode);
1188 zero_tmp = gen_reg_rtx (SImode);
1189 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1190 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1192 gen_fn = (branch_type == CMP_SI
1193 ? gen_movsicc_internal0
1194 : gen_movsicc_internal1);
1195 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1196 return 1;
1200 /* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1]. MODE is
1201 for the output, i.e., the input operands are twice as big as MODE. */
1203 void
1204 xtensa_split_operand_pair (rtx operands[4], enum machine_mode mode)
1206 switch (GET_CODE (operands[1]))
1208 case REG:
1209 operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1);
1210 operands[2] = gen_rtx_REG (mode, REGNO (operands[1]));
1211 break;
1213 case MEM:
1214 operands[3] = adjust_address (operands[1], mode, GET_MODE_SIZE (mode));
1215 operands[2] = adjust_address (operands[1], mode, 0);
1216 break;
1218 case CONST_INT:
1219 case CONST_DOUBLE:
1220 split_double (operands[1], &operands[2], &operands[3]);
1221 break;
1223 default:
1224 abort ();
1227 switch (GET_CODE (operands[0]))
1229 case REG:
1230 operands[1] = gen_rtx_REG (mode, REGNO (operands[0]) + 1);
1231 operands[0] = gen_rtx_REG (mode, REGNO (operands[0]));
1232 break;
1234 case MEM:
1235 operands[1] = adjust_address (operands[0], mode, GET_MODE_SIZE (mode));
1236 operands[0] = adjust_address (operands[0], mode, 0);
1237 break;
1239 default:
1240 abort ();
1245 /* Emit insns to move operands[1] into operands[0].
1246 Return 1 if we have written out everything that needs to be done to
1247 do the move. Otherwise, return 0 and the caller will emit the move
1248 normally. */
1251 xtensa_emit_move_sequence (rtx *operands, enum machine_mode mode)
1253 if (CONSTANT_P (operands[1])
1254 && GET_CODE (operands[1]) != CONSTANT_P_RTX
1255 && (GET_CODE (operands[1]) != CONST_INT
1256 || !xtensa_simm12b (INTVAL (operands[1]))))
1258 if (!TARGET_CONST16)
1259 operands[1] = force_const_mem (SImode, operands[1]);
1261 /* PC-relative loads are always SImode, and CONST16 is only
1262 supported in the movsi pattern, so add a SUBREG for any other
1263 (smaller) mode. */
1265 if (mode != SImode)
1267 if (register_operand (operands[0], mode))
1269 operands[0] = simplify_gen_subreg (SImode, operands[0], mode, 0);
1270 emit_move_insn (operands[0], operands[1]);
1271 return 1;
1273 else
1275 operands[1] = force_reg (SImode, operands[1]);
1276 operands[1] = gen_lowpart_SUBREG (mode, operands[1]);
1281 if (!(reload_in_progress | reload_completed)
1282 && !xtensa_valid_move (mode, operands))
1283 operands[1] = force_reg (mode, operands[1]);
1285 operands[1] = xtensa_copy_incoming_a7 (operands[1]);
1287 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1288 instruction won't be recognized after reload, so we remove the
1289 subreg and adjust mem accordingly. */
1290 if (reload_in_progress)
1292 operands[0] = fixup_subreg_mem (operands[0]);
1293 operands[1] = fixup_subreg_mem (operands[1]);
1295 return 0;
1299 static rtx
1300 fixup_subreg_mem (rtx x)
1302 if (GET_CODE (x) == SUBREG
1303 && GET_CODE (SUBREG_REG (x)) == REG
1304 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1306 rtx temp =
1307 gen_rtx_SUBREG (GET_MODE (x),
1308 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1309 SUBREG_BYTE (x));
1310 x = alter_subreg (&temp);
1312 return x;
1316 /* Check if an incoming argument in a7 is expected to be used soon and
1317 if OPND is a register or register pair that includes a7. If so,
1318 create a new pseudo and copy a7 into that pseudo at the very
1319 beginning of the function, followed by the special "set_frame_ptr"
1320 unspec_volatile insn. The return value is either the original
1321 operand, if it is not a7, or the new pseudo containing a copy of
1322 the incoming argument. This is necessary because the register
1323 allocator will ignore conflicts with a7 and may either assign some
1324 other pseudo to a7 or use a7 as the hard_frame_pointer, clobbering
1325 the incoming argument in a7. By copying the argument out of a7 as
1326 the very first thing, and then immediately following that with an
1327 unspec_volatile to keep the scheduler away, we should avoid any
1328 problems. Putting the set_frame_ptr insn at the beginning, with
1329 only the a7 copy before it, also makes it easier for the prologue
1330 expander to initialize the frame pointer after the a7 copy and to
1331 fix up the a7 copy to use the stack pointer instead of the frame
1332 pointer. */
1335 xtensa_copy_incoming_a7 (rtx opnd)
1337 rtx entry_insns = 0;
1338 rtx reg, tmp;
1339 enum machine_mode mode;
1341 if (!cfun->machine->need_a7_copy)
1342 return opnd;
1344 /* This function should never be called again once a7 has been copied. */
1345 if (cfun->machine->set_frame_ptr_insn)
1346 abort ();
1348 mode = GET_MODE (opnd);
1350 /* The operand using a7 may come in a later instruction, so just return
1351 the original operand if it doesn't use a7. */
1352 reg = opnd;
1353 if (GET_CODE (reg) == SUBREG)
1355 if (SUBREG_BYTE (reg) != 0)
1356 abort ();
1357 reg = SUBREG_REG (reg);
1359 if (GET_CODE (reg) != REG
1360 || REGNO (reg) > A7_REG
1361 || REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) <= A7_REG)
1362 return opnd;
1364 /* 1-word args will always be in a7; 2-word args in a6/a7. */
1365 if (REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) - 1 != A7_REG)
1366 abort ();
1368 cfun->machine->need_a7_copy = false;
1370 /* Copy a7 to a new pseudo at the function entry. Use gen_raw_REG to
1371 create the REG for a7 so that hard_frame_pointer_rtx is not used. */
1373 push_to_sequence (entry_insns);
1374 tmp = gen_reg_rtx (mode);
1376 switch (mode)
1378 case DFmode:
1379 case DImode:
1380 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 0),
1381 gen_rtx_REG (SImode, A7_REG - 1)));
1382 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 4),
1383 gen_raw_REG (SImode, A7_REG)));
1384 break;
1385 case SFmode:
1386 emit_insn (gen_movsf_internal (tmp, gen_raw_REG (mode, A7_REG)));
1387 break;
1388 case SImode:
1389 emit_insn (gen_movsi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1390 break;
1391 case HImode:
1392 emit_insn (gen_movhi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1393 break;
1394 case QImode:
1395 emit_insn (gen_movqi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1396 break;
1397 default:
1398 abort ();
1401 cfun->machine->set_frame_ptr_insn = emit_insn (gen_set_frame_ptr ());
1402 entry_insns = get_insns ();
1403 end_sequence ();
1405 if (cfun->machine->vararg_a7)
1407 /* This is called from within builtin_savereg, so we're already
1408 inside a start_sequence that will be placed at the start of
1409 the function. */
1410 emit_insn (entry_insns);
1412 else
1414 /* Put entry_insns after the NOTE that starts the function. If
1415 this is inside a start_sequence, make the outer-level insn
1416 chain current, so the code is placed at the start of the
1417 function. */
1418 push_topmost_sequence ();
1419 emit_insn_after (entry_insns, get_insns ());
1420 pop_topmost_sequence ();
1423 return tmp;
1427 /* Try to expand a block move operation to an RTL block move instruction.
1428 If not optimizing, or if the block size is not a constant, or if the
1429 block is too large to move in a few pieces, the expansion fails and
1430 GCC falls back to calling memcpy().
1432 operands[0] is the destination
1433 operands[1] is the source
1434 operands[2] is the length
1435 operands[3] is the alignment */
1438 xtensa_expand_block_move (rtx *operands)
1440 rtx dest = operands[0];
1441 rtx src = operands[1];
1442 int bytes = INTVAL (operands[2]);
1443 int align = XINT (operands[3], 0);
1444 int num_pieces, move_ratio;
1446 /* If this is not a fixed size move, just call memcpy. */
1447 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1448 return 0;
1450 /* Anything to move? */
1451 if (bytes <= 0)
1452 return 1;
1454 if (align > MOVE_MAX)
1455 align = MOVE_MAX;
1457 /* Decide whether to expand inline based on the optimization level. */
1458 move_ratio = 4;
1459 if (optimize > 2)
1460 move_ratio = LARGEST_MOVE_RATIO;
1461 num_pieces = (bytes / align) + (bytes % align); /* Close enough anyway. */
1462 if (num_pieces >= move_ratio)
1463 return 0;
1465 /* Make sure the memory addresses are valid. */
1466 operands[0] = validize_mem (dest);
1467 operands[1] = validize_mem (src);
1469 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1470 operands[2], operands[3]));
1471 return 1;
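/* Example of the inline/out-of-line decision: a 15-byte copy with 4-byte
   alignment gives num_pieces = 15/4 + 15%4 = 6, so with the default
   move_ratio of 4 it falls back to memcpy(), while at -O3 (move_ratio 15)
   it is expanded inline.  */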
1475 /* Emit a sequence of instructions to implement a block move, trying
1476 to hide load delay slots as much as possible. Load N values into
1477 temporary registers, store those N values, and repeat until the
1478 complete block has been moved. N=delay_slots+1. */
1480 struct meminsnbuf
1482 char template[30];
1483 rtx operands[2];
1486 void
1487 xtensa_emit_block_move (rtx *operands, rtx *tmpregs, int delay_slots)
1489 rtx dest = operands[0];
1490 rtx src = operands[1];
1491 int bytes = INTVAL (operands[2]);
1492 int align = XINT (operands[3], 0);
1493 rtx from_addr = XEXP (src, 0);
1494 rtx to_addr = XEXP (dest, 0);
1495 int from_struct = MEM_IN_STRUCT_P (src);
1496 int to_struct = MEM_IN_STRUCT_P (dest);
1497 int offset = 0;
1498 int chunk_size, item_size;
1499 struct meminsnbuf *ldinsns, *stinsns;
1500 const char *ldname, *stname;
1501 enum machine_mode mode;
1503 if (align > MOVE_MAX)
1504 align = MOVE_MAX;
1505 item_size = align;
1506 chunk_size = delay_slots + 1;
1508 ldinsns = (struct meminsnbuf *)
1509 alloca (chunk_size * sizeof (struct meminsnbuf));
1510 stinsns = (struct meminsnbuf *)
1511 alloca (chunk_size * sizeof (struct meminsnbuf));
1513 mode = xtensa_find_mode_for_size (item_size);
1514 item_size = GET_MODE_SIZE (mode);
1515 ldname = xtensa_ld_opcodes[(int) mode];
1516 stname = xtensa_st_opcodes[(int) mode];
1518 while (bytes > 0)
1520 int n;
1522 for (n = 0; n < chunk_size; n++)
1524 rtx addr, mem;
1526 if (bytes == 0)
1528 chunk_size = n;
1529 break;
1532 if (bytes < item_size)
1534 /* Find a smaller item_size which we can load & store. */
1535 item_size = bytes;
1536 mode = xtensa_find_mode_for_size (item_size);
1537 item_size = GET_MODE_SIZE (mode);
1538 ldname = xtensa_ld_opcodes[(int) mode];
1539 stname = xtensa_st_opcodes[(int) mode];
1542 /* Record the load instruction opcode and operands. */
1543 addr = plus_constant (from_addr, offset);
1544 mem = gen_rtx_MEM (mode, addr);
1545 if (! memory_address_p (mode, addr))
1546 abort ();
1547 MEM_IN_STRUCT_P (mem) = from_struct;
1548 ldinsns[n].operands[0] = tmpregs[n];
1549 ldinsns[n].operands[1] = mem;
1550 sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1552 /* Record the store instruction opcode and operands. */
1553 addr = plus_constant (to_addr, offset);
1554 mem = gen_rtx_MEM (mode, addr);
1555 if (! memory_address_p (mode, addr))
1556 abort ();
1557 MEM_IN_STRUCT_P (mem) = to_struct;
1558 stinsns[n].operands[0] = tmpregs[n];
1559 stinsns[n].operands[1] = mem;
1560 sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1562 offset += item_size;
1563 bytes -= item_size;
1566 /* Now output the loads followed by the stores. */
1567 for (n = 0; n < chunk_size; n++)
1568 output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1569 for (n = 0; n < chunk_size; n++)
1570 output_asm_insn (stinsns[n].template, stinsns[n].operands);
1575 static enum machine_mode
1576 xtensa_find_mode_for_size (unsigned item_size)
1578 enum machine_mode mode, tmode;
1580 while (1)
1582 mode = VOIDmode;
1584 /* Find mode closest to but not bigger than item_size. */
1585 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1586 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1587 if (GET_MODE_SIZE (tmode) <= item_size)
1588 mode = tmode;
1589 if (mode == VOIDmode)
1590 abort ();
1592 item_size = GET_MODE_SIZE (mode);
1594 if (xtensa_ld_opcodes[(int) mode]
1595 && xtensa_st_opcodes[(int) mode])
1596 break;
1598 /* Cannot load & store this mode; try something smaller. */
1599 item_size -= 1;
1602 return mode;
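/* For example, a request for 3 bytes settles on HImode (2 bytes), assuming
   the 16-bit load/store opcode names have been entered in the tables set up
   by override_options.  */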
1606 void
1607 xtensa_expand_nonlocal_goto (rtx *operands)
1609 rtx goto_handler = operands[1];
1610 rtx containing_fp = operands[3];
1612 /* Generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1613 is too big to generate in-line. */
1615 if (GET_CODE (containing_fp) != REG)
1616 containing_fp = force_reg (Pmode, containing_fp);
1618 goto_handler = replace_rtx (copy_rtx (goto_handler),
1619 virtual_stack_vars_rtx,
1620 containing_fp);
1622 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1623 0, VOIDmode, 2,
1624 containing_fp, Pmode,
1625 goto_handler, Pmode);
1629 static struct machine_function *
1630 xtensa_init_machine_status (void)
1632 return ggc_alloc_cleared (sizeof (struct machine_function));
1636 void
1637 xtensa_setup_frame_addresses (void)
1639 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1640 cfun->machine->accesses_prev_frame = 1;
1642 emit_library_call
1643 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1644 0, VOIDmode, 0);
1648 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1649 a comment showing where the end of the loop is. However, if there is a
1650 label or a branch at the end of the loop then we need to place a nop
1651 there. If the loop ends with a label we need the nop so that branches
1652 targeting that label will target the nop (and thus remain in the loop),
1653 instead of targeting the instruction after the loop (and thus exiting
1654 the loop). If the loop ends with a branch, we need the nop in case the
1655 branch is targeting a location inside the loop. When the branch
1656 executes it will cause the loop count to be decremented even if it is
1657 taken (because it is the last instruction in the loop), so we need to
1658 nop after the branch to prevent the loop count from being decremented
1659 when the branch is taken. */
1661 void
1662 xtensa_emit_loop_end (rtx insn, rtx *operands)
1664 char done = 0;
1666 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1668 switch (GET_CODE (insn))
1670 case NOTE:
1671 case BARRIER:
1672 break;
1674 case CODE_LABEL:
1675 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1676 done = 1;
1677 break;
1679 default:
1681 rtx body = PATTERN (insn);
1683 if (GET_CODE (body) == JUMP_INSN)
1685 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1686 done = 1;
1688 else if ((GET_CODE (body) != USE)
1689 && (GET_CODE (body) != CLOBBER))
1690 done = 1;
1692 break;
1696 output_asm_insn ("# loop end for %0", operands);
1700 char *
1701 xtensa_emit_call (int callop, rtx *operands)
1703 static char result[64];
1704 rtx tgt = operands[callop];
1706 if (GET_CODE (tgt) == CONST_INT)
1707 sprintf (result, "call8\t0x%lx", INTVAL (tgt));
1708 else if (register_operand (tgt, VOIDmode))
1709 sprintf (result, "callx8\t%%%d", callop);
1710 else
1711 sprintf (result, "call8\t%%%d", callop);
1713 return result;
1717 /* Return the debugger register number to use for 'regno'. */
1720 xtensa_dbx_register_number (int regno)
1722 int first = -1;
1724 if (GP_REG_P (regno))
1726 regno -= GP_REG_FIRST;
1727 first = 0;
1729 else if (BR_REG_P (regno))
1731 regno -= BR_REG_FIRST;
1732 first = 16;
1734 else if (FP_REG_P (regno))
1736 regno -= FP_REG_FIRST;
1737 first = 48;
1739 else if (ACC_REG_P (regno))
1741 first = 0x200; /* Start of Xtensa special registers. */
1742 regno = 16; /* ACCLO is special register 16. */
1745 /* When optimizing, we sometimes get asked about pseudo-registers
1746 that don't represent hard registers. Return 0 for these. */
1747 if (first == -1)
1748 return 0;
1750 return first + regno;
1754 /* Argument support functions. */
1756 /* Initialize CUMULATIVE_ARGS for a function. */
1758 void
1759 init_cumulative_args (CUMULATIVE_ARGS *cum, int incoming)
1761 cum->arg_words = 0;
1762 cum->incoming = incoming;
1766 /* Advance the argument to the next argument position. */
1768 void
1769 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type)
1771 int words, max;
1772 int *arg_words;
1774 arg_words = &cum->arg_words;
1775 max = MAX_ARGS_IN_REGISTERS;
1777 words = (((mode != BLKmode)
1778 ? (int) GET_MODE_SIZE (mode)
1779 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1781 if ((*arg_words + words > max) && (*arg_words < max))
1782 *arg_words = max;
1784 *arg_words += words;
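/* Example (with this port's 4-byte UNITS_PER_WORD): a DImode argument
   occupies words = 8/4 = 2 argument words; if only one register word
   remains below MAX_ARGS_IN_REGISTERS, the test above bumps *arg_words to
   the limit so the whole argument goes to memory rather than being split
   between a register and the stack.  */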
1788 /* Return an RTL expression containing the register for the given mode,
1789 or 0 if the argument is to be passed on the stack. INCOMING_P is nonzero
1790 if this is an incoming argument to the current function. */
1793 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1794 int incoming_p)
1796 int regbase, words, max;
1797 int *arg_words;
1798 int regno;
1800 arg_words = &cum->arg_words;
1801 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1802 max = MAX_ARGS_IN_REGISTERS;
1804 words = (((mode != BLKmode)
1805 ? (int) GET_MODE_SIZE (mode)
1806 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1808 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1810 int align = TYPE_ALIGN (type) / BITS_PER_WORD;
1811 *arg_words = (*arg_words + align - 1) & -align;
1814 if (*arg_words + words > max)
1815 return (rtx)0;
1817 regno = regbase + *arg_words;
1819 if (cum->incoming && regno <= A7_REG && regno + words > A7_REG)
1820 cfun->machine->need_a7_copy = true;
1822 return gen_rtx_REG (mode, regno);
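/* Note the alignment handling above: a type aligned to more than one word
   (e.g. a double, if doubles are 8-byte aligned on this target) has its
   starting word index rounded up, so such arguments always begin in an
   even-numbered argument word, whether in registers or on the stack.  */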
1826 static bool
1827 xtensa_return_in_msb (tree valtype)
1829 return (TARGET_BIG_ENDIAN
1830 && AGGREGATE_TYPE_P (valtype)
1831 && int_size_in_bytes (valtype) >= UNITS_PER_WORD);
1835 void
1836 override_options (void)
1838 int regno;
1839 enum machine_mode mode;
1841 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1842 error ("boolean registers required for the floating-point option");
1844 /* Set up the tables of ld/st opcode names for block moves. */
1845 xtensa_ld_opcodes[(int) SImode] = "l32i";
1846 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1847 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1848 xtensa_st_opcodes[(int) SImode] = "s32i";
1849 xtensa_st_opcodes[(int) HImode] = "s16i";
1850 xtensa_st_opcodes[(int) QImode] = "s8i";
1852 xtensa_char_to_class['q'] = SP_REG;
1853 xtensa_char_to_class['a'] = GR_REGS;
1854 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1855 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1856 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1857 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1858 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1859 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1860 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1861 xtensa_char_to_class['W'] = ((TARGET_CONST16) ? GR_REGS: NO_REGS);
1863 /* Set up array giving whether a given register can hold a given mode. */
1864 for (mode = VOIDmode;
1865 mode != MAX_MACHINE_MODE;
1866 mode = (enum machine_mode) ((int) mode + 1))
1868 int size = GET_MODE_SIZE (mode);
1869 enum mode_class class = GET_MODE_CLASS (mode);
1871 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1873 int temp;
1875 if (ACC_REG_P (regno))
1876 temp = (TARGET_MAC16
1877 && (class == MODE_INT) && (size <= UNITS_PER_WORD));
1878 else if (GP_REG_P (regno))
1879 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1880 else if (FP_REG_P (regno))
1881 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1882 else if (BR_REG_P (regno))
1883 temp = (TARGET_BOOLEANS && (mode == CCmode));
1884 else
1885 temp = FALSE;
1887 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1891 init_machine_status = xtensa_init_machine_status;
1893 /* Check PIC settings. PIC is only supported when using L32R
1894 instructions, and some targets need to always use PIC. */
1895 if (flag_pic && TARGET_CONST16)
1896 error ("-f%s is not supported with CONST16 instructions",
1897 (flag_pic > 1 ? "PIC" : "pic"));
1898 else if (XTENSA_ALWAYS_PIC)
1900 if (TARGET_CONST16)
1901 error ("PIC is required but not supported with CONST16 instructions");
1902 flag_pic = 1;
1904 /* There's no need for -fPIC (as opposed to -fpic) on Xtensa. */
1905 if (flag_pic > 1)
1906 flag_pic = 1;
1910 /* A C compound statement to output to stdio stream STREAM the
1911 assembler syntax for an instruction operand X. X is an RTL
1912 expression.
1914 CODE is a value that can be used to specify one of several ways
1915 of printing the operand. It is used when identical operands
1916 must be printed differently depending on the context. CODE
1917 comes from the '%' specification that was used to request
1918 printing of the operand. If the specification was just '%DIGIT'
1919 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1920 is the ASCII code for LTR.
1922 If X is a register, this macro should print the register's name.
1923 The names can be found in an array 'reg_names' whose type is
1924 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1926 When the machine description has a specification '%PUNCT' (a '%'
1927 followed by a punctuation character), this macro is called with
1928 a null pointer for X and the punctuation character for CODE.
1930 'a', 'c', 'l', and 'n' are reserved.
1932 The Xtensa specific codes are:
1934 'd' CONST_INT, print as signed decimal
1935 'x' CONST_INT, print as signed hexadecimal
1936 'K' CONST_INT, print number of bits in mask for EXTUI
1937 'R' CONST_INT, print (X & 0x1f)
1938 'L' CONST_INT, print ((32 - X) & 0x1f)
1939 'D' REG, print second register of double-word register operand
1940 'N' MEM, print address of next word following a memory operand
1941 'v' MEM, if memory reference is volatile, output a MEMW before it
1942 't' any constant, add "@h" suffix for top 16 bits
1943 'b' any constant, add "@l" suffix for bottom 16 bits
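/* For example, "%K" applied to the constant 0x3f prints 6 (the EXTUI mask
   width), and "%L" applied to 8 prints 24, the left-shift amount
   complementary to a right shift by 8 (modulo 32).  */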
1946 static void
1947 printx (FILE *file, signed int val)
1949 /* Print a hexadecimal value in a nice way. */
1950 if ((val > -0xa) && (val < 0xa))
1951 fprintf (file, "%d", val);
1952 else if (val < 0)
1953 fprintf (file, "-0x%x", -val);
1954 else
1955 fprintf (file, "0x%x", val);
1959 void
1960 print_operand (FILE *file, rtx x, int letter)
1962 if (!x)
1963 error ("PRINT_OPERAND null pointer");
1965 switch (letter)
1967 case 'D':
1968 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
1969 fprintf (file, "%s", reg_names[xt_true_regnum (x) + 1]);
1970 else
1971 output_operand_lossage ("invalid %%D value");
1972 break;
1974 case 'v':
1975 if (GET_CODE (x) == MEM)
1977 /* For a volatile memory reference, emit a MEMW before the
1978 load or store. */
1979 if (MEM_VOLATILE_P (x))
1980 fprintf (file, "memw\n\t");
1982 else
1983 output_operand_lossage ("invalid %%v value");
1984 break;
1986 case 'N':
1987 if (GET_CODE (x) == MEM
1988 && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
1990 x = adjust_address (x, GET_MODE (x) == DFmode ? SFmode : SImode, 4);
1991 output_address (XEXP (x, 0));
1993 else
1994 output_operand_lossage ("invalid %%N value");
1995 break;
1997 case 'K':
1998 if (GET_CODE (x) == CONST_INT)
2000 int num_bits = 0;
2001 unsigned val = INTVAL (x);
2002 while (val & 1)
2004 num_bits += 1;
2005 val = val >> 1;
2007 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
2008 fatal_insn ("invalid mask", x);
2010 fprintf (file, "%d", num_bits);
2012 else
2013 output_operand_lossage ("invalid %%K value");
2014 break;
2016 case 'L':
2017 if (GET_CODE (x) == CONST_INT)
2018 fprintf (file, "%ld", (32 - INTVAL (x)) & 0x1f);
2019 else
2020 output_operand_lossage ("invalid %%L value");
2021 break;
2023 case 'R':
2024 if (GET_CODE (x) == CONST_INT)
2025 fprintf (file, "%ld", INTVAL (x) & 0x1f);
2026 else
2027 output_operand_lossage ("invalid %%R value");
2028 break;
2030 case 'x':
2031 if (GET_CODE (x) == CONST_INT)
2032 printx (file, INTVAL (x));
2033 else
2034 output_operand_lossage ("invalid %%x value");
2035 break;
2037 case 'd':
2038 if (GET_CODE (x) == CONST_INT)
2039 fprintf (file, "%ld", INTVAL (x));
2040 else
2041 output_operand_lossage ("invalid %%d value");
2042 break;
2044 case 't':
2045 case 'b':
2046 if (GET_CODE (x) == CONST_INT)
2048 printx (file, INTVAL (x));
2049 fputs (letter == 't' ? "@h" : "@l", file);
2051 else if (GET_CODE (x) == CONST_DOUBLE)
2053 REAL_VALUE_TYPE r;
2054 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2055 if (GET_MODE (x) == SFmode)
2057 long l;
2058 REAL_VALUE_TO_TARGET_SINGLE (r, l);
2059 fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
2061 else
2062 output_operand_lossage ("invalid %%t/%%b value");
2064 else if (GET_CODE (x) == CONST)
2066 /* X must be a symbolic constant on ELF. Write an expression
2067 suitable for 'const16' that sets the high or low 16 bits. */
2068 if (GET_CODE (XEXP (x, 0)) != PLUS
2069 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
2070 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
2071 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
2072 output_operand_lossage ("invalid %%t/%%b value");
2073 print_operand (file, XEXP (XEXP (x, 0), 0), 0);
2074 fputs (letter == 't' ? "@h" : "@l", file);
2075 /* There must be a non-alphanumeric character between 'h' or 'l'
2076 and the number. The '-' is added by print_operand() already. */
2077 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
2078 fputs ("+", file);
2079 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
2081 else
2083 output_addr_const (file, x);
2084 fputs (letter == 't' ? "@h" : "@l", file);
2086 break;
2088 default:
2089 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2090 fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
2091 else if (GET_CODE (x) == MEM)
2092 output_address (XEXP (x, 0));
2093 else if (GET_CODE (x) == CONST_INT)
2094 fprintf (file, "%ld", INTVAL (x));
2095 else
2096 output_addr_const (file, x);
2101 /* A C compound statement to output to stdio stream STREAM the
2102 assembler syntax for an instruction operand that is a memory
2103 reference whose address is ADDR. ADDR is an RTL expression. */
2105 void
2106 print_operand_address (FILE *file, rtx addr)
2108 if (!addr)
2109 error ("PRINT_OPERAND_ADDRESS, null pointer");
2111 switch (GET_CODE (addr))
2113 default:
2114 fatal_insn ("invalid address", addr);
2115 break;
2117 case REG:
2118 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2119 break;
2121 case PLUS:
2123 rtx reg = (rtx)0;
2124 rtx offset = (rtx)0;
2125 rtx arg0 = XEXP (addr, 0);
2126 rtx arg1 = XEXP (addr, 1);
2128 if (GET_CODE (arg0) == REG)
2130 reg = arg0;
2131 offset = arg1;
2133 else if (GET_CODE (arg1) == REG)
2135 reg = arg1;
2136 offset = arg0;
2138 else
2139 fatal_insn ("no register in address", addr);
2141 if (CONSTANT_P (offset))
2143 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2144 output_addr_const (file, offset);
2146 else
2147 fatal_insn ("address offset not a constant", addr);
2149 break;
2151 case LABEL_REF:
2152 case SYMBOL_REF:
2153 case CONST_INT:
2154 case CONST:
2155 output_addr_const (file, addr);
2156 break;
2161 void
2162 xtensa_output_literal (FILE *file, rtx x, enum machine_mode mode, int labelno)
2164 long value_long[2];
2165 REAL_VALUE_TYPE r;
2166 int size;
2168 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2170 switch (GET_MODE_CLASS (mode))
2172 case MODE_FLOAT:
2173 if (GET_CODE (x) != CONST_DOUBLE)
2174 abort ();
2176 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2177 switch (mode)
2179 case SFmode:
2180 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2181 fprintf (file, "0x%08lx\n", value_long[0]);
2182 break;
2184 case DFmode:
2185 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2186 fprintf (file, "0x%08lx, 0x%08lx\n",
2187 value_long[0], value_long[1]);
2188 break;
2190 default:
2191 abort ();
2194 break;
2196 case MODE_INT:
2197 case MODE_PARTIAL_INT:
2198 size = GET_MODE_SIZE (mode);
2199 if (size == 4)
2201 output_addr_const (file, x);
2202 fputs ("\n", file);
2204 else if (size == 8)
2206 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2207 fputs (", ", file);
2208 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2209 fputs ("\n", file);
2211 else
2212 abort ();
2213 break;
2215 default:
2216 abort ();
2221 /* Return the bytes needed to compute the frame pointer from the current
2222 stack pointer. */
2224 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2225 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
2227 long
2228 compute_frame_size (int size)
2230 /* Add space for the incoming static chain value. */
2231 if (current_function_needs_context)
2232 size += (1 * UNITS_PER_WORD);
2234 xtensa_current_frame_size =
2235 XTENSA_STACK_ALIGN (size
2236 + current_function_outgoing_args_size
2237 + (WINDOW_SIZE * UNITS_PER_WORD));
2238 return xtensa_current_frame_size;
2239 }
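/* A minimal standalone sketch of the rounding done by XTENSA_STACK_ALIGN,
   assuming STACK_BOUNDARY is 128 bits so that STACK_BYTES is 16; the
   identifiers below are illustrative, not from this file:

     #include <stdio.h>

     #define STACK_BYTES 16
     #define ALIGN_UP(loc) (((loc) + STACK_BYTES - 1) & ~(STACK_BYTES - 1))

     int main (void)
     {
       // Frame sizes are rounded up to the next 16-byte multiple.
       printf ("%d %d %d\n", ALIGN_UP (1), ALIGN_UP (16), ALIGN_UP (17));
       return 0;
     }

   which prints "16 16 32".  compute_frame_size applies the same rounding
   to the sum of the local frame, the outgoing argument area, and the
   register-window save area.  */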
2242 int
2243 xtensa_frame_pointer_required (void)
2245 /* The code to expand builtin_frame_addr and builtin_return_addr
2246 currently uses the hard_frame_pointer instead of frame_pointer.
2247 This seems wrong but maybe it's necessary for other architectures.
2248 This function is derived from the i386 code. */
2250 if (cfun->machine->accesses_prev_frame)
2251 return 1;
2253 return 0;
2257 void
2258 xtensa_expand_prologue (void)
2260 HOST_WIDE_INT total_size;
2261 rtx size_rtx;
2263 total_size = compute_frame_size (get_frame_size ());
2264 size_rtx = GEN_INT (total_size);
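/* Note: the 1 << (12+3) bound below reflects the ENTRY instruction,
   whose 12-bit frame-size immediate is scaled by 8 bytes, so frames
   smaller than 32 KB can be allocated by ENTRY alone; larger frames
   fall back to an explicit stack-pointer adjustment through a8.  */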
2266 if (total_size < (1 << (12+3)))
2267 emit_insn (gen_entry (size_rtx, size_rtx));
2268 else
2270 /* Use a8 as a temporary since a0-a7 may be live. */
2271 rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
2272 emit_insn (gen_entry (size_rtx, GEN_INT (MIN_FRAME_SIZE)));
2273 emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
2274 emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
2275 emit_move_insn (stack_pointer_rtx, tmp_reg);
2278 if (frame_pointer_needed)
2280 if (cfun->machine->set_frame_ptr_insn)
2282 rtx first, insn;
2284 push_topmost_sequence ();
2285 first = get_insns ();
2286 pop_topmost_sequence ();
2288 /* For all instructions prior to set_frame_ptr_insn, replace
2289 hard_frame_pointer references with stack_pointer. */
2290 for (insn = first;
2291 insn != cfun->machine->set_frame_ptr_insn;
2292 insn = NEXT_INSN (insn))
2294 if (INSN_P (insn))
2295 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2296 hard_frame_pointer_rtx,
2297 stack_pointer_rtx);
2300 else
2301 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
2306 /* Clear variables at function end. */
2308 void
2309 xtensa_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
2310 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
2312 xtensa_current_frame_size = 0;
2316 rtx
2317 xtensa_return_addr (int count, rtx frame)
2319 rtx result, retaddr;
2321 if (count == -1)
2322 retaddr = gen_rtx_REG (Pmode, A0_REG);
2323 else
2325 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2326 addr = memory_address (Pmode, addr);
2327 retaddr = gen_reg_rtx (Pmode);
2328 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2331 /* The 2 most-significant bits of the return address on Xtensa hold
2332 the register window size. To get the real return address, these
2333 bits must be replaced with the high bits from the current PC. */
2335 result = gen_reg_rtx (Pmode);
2336 emit_insn (gen_fix_return_addr (result, retaddr));
2337 return result;
2338 }
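/* A sketch of the bit manipulation performed by the fix_return_addr
   pattern, assuming the windowed-ABI convention that the two high bits
   of the saved return address encode the caller's window increment; the
   identifiers below are illustrative, not from this file:

     #include <stdio.h>
     #include <stdint.h>

     static uint32_t fix_retaddr_sketch (uint32_t retaddr, uint32_t pc)
     {
       // Keep the low 30 bits of the saved address and take the top
       // 2 bits from the current PC.
       return (retaddr & 0x3fffffffu) | (pc & 0xc0000000u);
     }

     int main (void)
     {
       printf ("0x%08x\n", (unsigned) fix_retaddr_sketch (0x800012a4u, 0x00001000u));
       return 0;
     }

   which prints "0x000012a4".  */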
2341 /* Create the va_list data type.
2343 This structure is set up by __builtin_saveregs. The __va_reg field
2344 points to a stack-allocated region holding the contents of the
2345 incoming argument registers. The __va_ndx field is an index
2346 initialized to the position of the first unnamed (variable)
2347 argument. This same index is also used to address the arguments
2348 passed in memory. Thus, the __va_stk field is initialized to point
2349 to the position of the first argument in memory, offset to account
2350 for the arguments passed in registers and to account for the size
2351 of the argument registers not being 16-byte aligned. E.g., there
2352 are 6 argument registers of 4 bytes each, but we want the __va_ndx
2353 for the first stack argument to have the maximal alignment of 16
2354 bytes, so we offset the __va_stk address by 32 bytes so that
2355 __va_stk[32] references the first argument on the stack. */
2357 static tree
2358 xtensa_build_builtin_va_list (void)
2360 tree f_stk, f_reg, f_ndx, record, type_decl;
2362 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2363 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2365 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2366 ptr_type_node);
2367 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2368 ptr_type_node);
2369 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2370 integer_type_node);
2372 DECL_FIELD_CONTEXT (f_stk) = record;
2373 DECL_FIELD_CONTEXT (f_reg) = record;
2374 DECL_FIELD_CONTEXT (f_ndx) = record;
2376 TREE_CHAIN (record) = type_decl;
2377 TYPE_NAME (record) = type_decl;
2378 TYPE_FIELDS (record) = f_stk;
2379 TREE_CHAIN (f_stk) = f_reg;
2380 TREE_CHAIN (f_reg) = f_ndx;
2382 layout_type (record);
2383 return record;
2384 }
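/* Roughly equivalent C declaration of the record built above (the
   typedef name is hypothetical; the field names and types follow the
   build_decl calls):

     typedef struct __va_list_tag
     {
       void *__va_stk;   // 32 bytes before the first stack argument
       void *__va_reg;   // save area holding the incoming register args
       int   __va_ndx;   // byte index of the next argument
     } xtensa_va_list_sketch;
 */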
2387 /* Save the incoming argument registers on the stack. Returns the
2388 address of the saved registers. */
2390 static rtx
2391 xtensa_builtin_saveregs (void)
2393 rtx gp_regs, dest;
2394 int arg_words = current_function_args_info.arg_words;
2395 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2397 if (gp_left <= 0)
2398 return const0_rtx;
2400 /* Allocate the general-purpose register space. */
2401 gp_regs = assign_stack_local
2402 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2403 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2405 /* Now store the incoming registers. */
2406 dest = change_address (gp_regs, SImode,
2407 plus_constant (XEXP (gp_regs, 0),
2408 arg_words * UNITS_PER_WORD));
2409 cfun->machine->need_a7_copy = true;
2410 cfun->machine->vararg_a7 = true;
2411 move_block_from_reg (GP_ARG_FIRST + arg_words, dest, gp_left);
2413 return XEXP (gp_regs, 0);
2417 /* Implement `va_start' for varargs and stdarg. We look at the
2418 current function to fill in an initial va_list. */
2420 void
2421 xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
2423 tree f_stk, stk;
2424 tree f_reg, reg;
2425 tree f_ndx, ndx;
2426 tree t, u;
2427 int arg_words;
2429 arg_words = current_function_args_info.arg_words;
2431 f_stk = TYPE_FIELDS (va_list_type_node);
2432 f_reg = TREE_CHAIN (f_stk);
2433 f_ndx = TREE_CHAIN (f_reg);
2435 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2436 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2437 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2439 /* Call __builtin_saveregs; save the result in __va_reg */
2440 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2441 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2442 TREE_SIDE_EFFECTS (t) = 1;
2443 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2445 /* Set the __va_stk member to ($arg_ptr - 32). */
2446 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2447 u = fold (build (PLUS_EXPR, ptr_type_node, u, build_int_2 (-32, -1)));
2448 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2449 TREE_SIDE_EFFECTS (t) = 1;
2450 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2452 /* Set the __va_ndx member. If the first variable argument is on
2453 the stack, adjust __va_ndx by 2 words to account for the extra
2454 alignment offset for __va_stk. */
2455 if (arg_words >= MAX_ARGS_IN_REGISTERS)
2456 arg_words += 2;
2457 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2458 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2459 TREE_SIDE_EFFECTS (t) = 1;
2460 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2461 }
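/* A plain-C rendering of what the trees built above compute, assuming
   the six 4-byte argument registers described in the va_list comment;
   all identifiers below are hypothetical:

     struct sketch_va_list { void *stk; void *reg; int ndx; };

     static void sketch_va_start (struct sketch_va_list *ap,
                                  void *save_area,   // __builtin_saveregs result
                                  char *arg_ptr,     // incoming argument pointer
                                  int arg_words)     // words used by named args
     {
       ap->reg = save_area;
       ap->stk = arg_ptr - 32;
       if (arg_words >= 6)
         arg_words += 2;   // registers cover only 24 of the 32-byte offset
       ap->ndx = arg_words * 4;
     }
 */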
2464 /* Implement `va_arg'. */
2466 rtx
2467 xtensa_va_arg (tree valist, tree type)
2469 tree f_stk, stk;
2470 tree f_reg, reg;
2471 tree f_ndx, ndx;
2472 tree tmp, addr_tree, type_size;
2473 rtx array, orig_ndx, r, addr, size, va_size;
2474 rtx lab_false, lab_over, lab_false2;
2476 /* Handle complex values as separate real and imaginary parts. */
2477 if (TREE_CODE (type) == COMPLEX_TYPE)
2479 rtx real_part, imag_part, concat_val, local_copy;
2481 real_part = xtensa_va_arg (valist, TREE_TYPE (type));
2482 imag_part = xtensa_va_arg (valist, TREE_TYPE (type));
2484 /* Make a copy of the value in case the parts are not contiguous. */
2485 real_part = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (type)), real_part);
2486 imag_part = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (type)), imag_part);
2487 concat_val = gen_rtx_CONCAT (TYPE_MODE (type), real_part, imag_part);
2489 local_copy = assign_temp (type, 0, 1, 0);
2490 emit_move_insn (local_copy, concat_val);
2492 return XEXP (local_copy, 0);
2495 f_stk = TYPE_FIELDS (va_list_type_node);
2496 f_reg = TREE_CHAIN (f_stk);
2497 f_ndx = TREE_CHAIN (f_reg);
2499 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2500 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2501 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2503 type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
2505 va_size = gen_reg_rtx (SImode);
2506 tmp = fold (build (MULT_EXPR, sizetype,
2507 fold (build (TRUNC_DIV_EXPR, sizetype,
2508 fold (build (PLUS_EXPR, sizetype,
2509 type_size,
2510 size_int (UNITS_PER_WORD - 1))),
2511 size_int (UNITS_PER_WORD))),
2512 size_int (UNITS_PER_WORD)));
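/* The expression just built computes the argument size rounded up to
   whole words, i.e. ((type_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
   * UNITS_PER_WORD; with 4-byte words a 6-byte type yields a va_size
   of 8.  */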
2513 r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
2514 if (r != va_size)
2515 emit_move_insn (va_size, r);
2518 /* First align __va_ndx if necessary for this arg:
2520 if (__alignof__ (TYPE) > 4 )
2521 (AP).__va_ndx = (((AP).__va_ndx + __alignof__ (TYPE) - 1)
2522 & -__alignof__ (TYPE)); */
2524 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2526 int align = TYPE_ALIGN (type) / BITS_PER_UNIT;
2527 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2528 build_int_2 (align - 1, 0));
2529 tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2530 build_int_2 (-align, -1));
2531 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2532 TREE_SIDE_EFFECTS (tmp) = 1;
2533 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2537 /* Increment __va_ndx to point past the argument:
2539 orig_ndx = (AP).__va_ndx;
2540 (AP).__va_ndx += __va_size (TYPE); */
2542 orig_ndx = gen_reg_rtx (SImode);
2543 r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2544 if (r != orig_ndx)
2545 emit_move_insn (orig_ndx, r);
2547 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2548 make_tree (intSI_type_node, va_size));
2549 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2550 TREE_SIDE_EFFECTS (tmp) = 1;
2551 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2554 /* Check if the argument is in registers:
2556 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2557 && !MUST_PASS_IN_STACK (type))
2558 __array = (AP).__va_reg; */
2560 array = gen_reg_rtx (Pmode);
2562 lab_over = NULL_RTX;
2563 if (!MUST_PASS_IN_STACK (VOIDmode, type))
2565 lab_false = gen_label_rtx ();
2566 lab_over = gen_label_rtx ();
2568 emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
2569 EXPAND_NORMAL),
2570 GEN_INT (MAX_ARGS_IN_REGISTERS
2571 * UNITS_PER_WORD),
2572 GT, const1_rtx, SImode, 0, lab_false);
2574 r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2575 if (r != array)
2576 emit_move_insn (array, r);
2578 emit_jump_insn (gen_jump (lab_over));
2579 emit_barrier ();
2580 emit_label (lab_false);
2583 /* ...otherwise, the argument is on the stack (never split between
2584 registers and the stack -- change __va_ndx if necessary):
2586 else
2588 if (orig_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
2589 (AP).__va_ndx = 32 + __va_size (TYPE);
2590 __array = (AP).__va_stk;
2591 } */
2593 lab_false2 = gen_label_rtx ();
2594 emit_cmp_and_jump_insns (orig_ndx,
2595 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2596 GT, const1_rtx, SImode, 0, lab_false2);
2598 tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
2599 build_int_2 (32, 0));
2600 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2601 TREE_SIDE_EFFECTS (tmp) = 1;
2602 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2604 emit_label (lab_false2);
2606 r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2607 if (r != array)
2608 emit_move_insn (array, r);
2610 if (lab_over != NULL_RTX)
2611 emit_label (lab_over);
2614 /* Given the base array pointer (__array) and index to the subsequent
2615 argument (__va_ndx), find the address:
2617 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2618 ? sizeof (TYPE)
2619 : __va_size (TYPE))
2621 The results are endian-dependent because values smaller than one word
2622 are aligned differently. */
2624 size = gen_reg_rtx (SImode);
2625 emit_move_insn (size, va_size);
2627 if (BYTES_BIG_ENDIAN)
2629 rtx lab_use_va_size = gen_label_rtx ();
2631 emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
2632 EXPAND_NORMAL),
2633 GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
2634 GE, const1_rtx, SImode, 0, lab_use_va_size);
2636 r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
2637 if (r != size)
2638 emit_move_insn (size, r);
2640 emit_label (lab_use_va_size);
2643 addr_tree = build (PLUS_EXPR, ptr_type_node,
2644 make_tree (ptr_type_node, array),
2645 ndx);
2646 addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
2647 make_tree (intSI_type_node, size));
2648 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2649 addr = copy_to_reg (addr);
2650 return addr;
2654 enum reg_class
2655 xtensa_preferred_reload_class (rtx x, enum reg_class class, int isoutput)
2657 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2658 return NO_REGS;
2660 /* Don't use the stack pointer or hard frame pointer for reloads!
2661 The hard frame pointer would normally be OK except that it may
2662 briefly hold an incoming argument in the prologue, and reload
2663 won't know that it is live because the hard frame pointer is
2664 treated specially. */
2666 if (class == AR_REGS || class == GR_REGS)
2667 return RL_REGS;
2669 return class;
2673 enum reg_class
2674 xtensa_secondary_reload_class (enum reg_class class,
2675 enum machine_mode mode ATTRIBUTE_UNUSED,
2676 rtx x, int isoutput)
2678 int regno;
2680 if (GET_CODE (x) == SIGN_EXTEND)
2681 x = XEXP (x, 0);
2682 regno = xt_true_regnum (x);
2684 if (!isoutput)
2686 if (class == FP_REGS && constantpool_mem_p (x))
2687 return RL_REGS;
2690 if (ACC_REG_P (regno))
2691 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
2692 if (class == ACC_REG)
2693 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
2695 return NO_REGS;
2699 void
2700 order_regs_for_local_alloc (void)
2702 if (!leaf_function_p ())
2704 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2705 FIRST_PSEUDO_REGISTER * sizeof (int));
2707 else
2709 int i, num_arg_regs;
2710 int nxt = 0;
2712 /* Use the AR registers in increasing order (skipping a0 and a1)
2713 but save the incoming argument registers for a last resort. */
2714 num_arg_regs = current_function_args_info.arg_words;
2715 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2716 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2717 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2718 reg_alloc_order[nxt++] = i + num_arg_regs;
2719 for (i = 0; i < num_arg_regs; i++)
2720 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2722 /* List the coprocessor registers in order. */
2723 for (i = 0; i < BR_REG_NUM; i++)
2724 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2726 /* List the FP registers in order for now. */
2727 for (i = 0; i < 16; i++)
2728 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2730 /* GCC requires that we list *all* the registers.... */
2731 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2732 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2733 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2734 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2736 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2741 /* Some Xtensa targets support multiple bss sections. If the section
2742 name ends with ".bss", add SECTION_BSS to the flags. */
2744 static unsigned int
2745 xtensa_multibss_section_type_flags (tree decl, const char *name, int reloc)
2747 unsigned int flags = default_section_type_flags (decl, name, reloc);
2748 const char *suffix;
2750 suffix = strrchr (name, '.');
2751 if (suffix && strcmp (suffix, ".bss") == 0)
2753 if (!decl || (TREE_CODE (decl) == VAR_DECL
2754 && DECL_INITIAL (decl) == NULL_TREE))
2755 flags |= SECTION_BSS; /* @nobits */
2756 else
2757 warning ("only uninitialized variables can be placed in a "
2758 ".bss section");
2761 return flags;
2762 }
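/* Illustrative use (the section and variable names are made up):

     int scratch __attribute__ ((section (".sram.bss")));        // SECTION_BSS (@nobits)
     int table   __attribute__ ((section (".sram.bss"))) = 1;    // triggers the warning

   Only the uninitialized variable gets the SECTION_BSS flag.  */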
2765 /* The literal pool stays with the function. */
2767 static void
2768 xtensa_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
2769 rtx x ATTRIBUTE_UNUSED,
2770 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2772 function_section (current_function_decl);
2776 /* Compute a (partial) cost for rtx X. Return true if the complete
2777 cost has been computed, and false if subexpressions should be
2778 scanned. In either case, *TOTAL contains the cost result. */
2780 static bool
2781 xtensa_rtx_costs (rtx x, int code, int outer_code, int *total)
2783 switch (code)
2785 case CONST_INT:
2786 switch (outer_code)
2788 case SET:
2789 if (xtensa_simm12b (INTVAL (x)))
2791 *total = 4;
2792 return true;
2794 break;
2795 case PLUS:
2796 if (xtensa_simm8 (INTVAL (x))
2797 || xtensa_simm8x256 (INTVAL (x)))
2799 *total = 0;
2800 return true;
2802 break;
2803 case AND:
2804 if (xtensa_mask_immediate (INTVAL (x)))
2806 *total = 0;
2807 return true;
2809 break;
2810 case COMPARE:
2811 if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
2813 *total = 0;
2814 return true;
2816 break;
2817 case ASHIFT:
2818 case ASHIFTRT:
2819 case LSHIFTRT:
2820 case ROTATE:
2821 case ROTATERT:
2822 /* No way to tell if X is the 2nd operand so be conservative. */
2823 default: break;
2825 if (xtensa_simm12b (INTVAL (x)))
2826 *total = 5;
2827 else if (TARGET_CONST16)
2828 *total = COSTS_N_INSNS (2);
2829 else
2830 *total = 6;
2831 return true;
2833 case CONST:
2834 case LABEL_REF:
2835 case SYMBOL_REF:
2836 if (TARGET_CONST16)
2837 *total = COSTS_N_INSNS (2);
2838 else
2839 *total = 5;
2840 return true;
2842 case CONST_DOUBLE:
2843 if (TARGET_CONST16)
2844 *total = COSTS_N_INSNS (4);
2845 else
2846 *total = 7;
2847 return true;
2849 case MEM:
2851 int num_words =
2852 (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ? 2 : 1;
2854 if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
2855 *total = COSTS_N_INSNS (num_words);
2856 else
2857 *total = COSTS_N_INSNS (2*num_words);
2858 return true;
2861 case FFS:
2862 *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
2863 return true;
2865 case NOT:
2866 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
2867 return true;
2869 case AND:
2870 case IOR:
2871 case XOR:
2872 if (GET_MODE (x) == DImode)
2873 *total = COSTS_N_INSNS (2);
2874 else
2875 *total = COSTS_N_INSNS (1);
2876 return true;
2878 case ASHIFT:
2879 case ASHIFTRT:
2880 case LSHIFTRT:
2881 if (GET_MODE (x) == DImode)
2882 *total = COSTS_N_INSNS (50);
2883 else
2884 *total = COSTS_N_INSNS (1);
2885 return true;
2887 case ABS:
2889 enum machine_mode xmode = GET_MODE (x);
2890 if (xmode == SFmode)
2891 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2892 else if (xmode == DFmode)
2893 *total = COSTS_N_INSNS (50);
2894 else
2895 *total = COSTS_N_INSNS (4);
2896 return true;
2899 case PLUS:
2900 case MINUS:
2902 enum machine_mode xmode = GET_MODE (x);
2903 if (xmode == SFmode)
2904 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2905 else if (xmode == DFmode || xmode == DImode)
2906 *total = COSTS_N_INSNS (50);
2907 else
2908 *total = COSTS_N_INSNS (1);
2909 return true;
2912 case NEG:
2913 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
2914 return true;
2916 case MULT:
2918 enum machine_mode xmode = GET_MODE (x);
2919 if (xmode == SFmode)
2920 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
2921 else if (xmode == DFmode || xmode == DImode)
2922 *total = COSTS_N_INSNS (50);
2923 else if (TARGET_MUL32)
2924 *total = COSTS_N_INSNS (4);
2925 else if (TARGET_MAC16)
2926 *total = COSTS_N_INSNS (16);
2927 else if (TARGET_MUL16)
2928 *total = COSTS_N_INSNS (12);
2929 else
2930 *total = COSTS_N_INSNS (50);
2931 return true;
2934 case DIV:
2935 case MOD:
2937 enum machine_mode xmode = GET_MODE (x);
2938 if (xmode == SFmode)
2940 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
2941 return true;
2943 else if (xmode == DFmode)
2945 *total = COSTS_N_INSNS (50);
2946 return true;
2949 /* Fall through. */
2951 case UDIV:
2952 case UMOD:
2954 enum machine_mode xmode = GET_MODE (x);
2955 if (xmode == DImode)
2956 *total = COSTS_N_INSNS (50);
2957 else if (TARGET_DIV32)
2958 *total = COSTS_N_INSNS (32);
2959 else
2960 *total = COSTS_N_INSNS (50);
2961 return true;
2964 case SQRT:
2965 if (GET_MODE (x) == SFmode)
2966 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
2967 else
2968 *total = COSTS_N_INSNS (50);
2969 return true;
2971 case SMIN:
2972 case UMIN:
2973 case SMAX:
2974 case UMAX:
2975 *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
2976 return true;
2978 case SIGN_EXTRACT:
2979 case SIGN_EXTEND:
2980 *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
2981 return true;
2983 case ZERO_EXTRACT:
2984 case ZERO_EXTEND:
2985 *total = COSTS_N_INSNS (1);
2986 return true;
2988 default:
2989 return false;
2993 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2995 static bool
2996 xtensa_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
2998 return ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
2999 > 4 * UNITS_PER_WORD);
3000 }
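/* Illustration (hypothetical types): with 4-byte words, aggregates
   larger than 16 bytes are returned in memory:

     struct fits    { int a[4]; };   // 16 bytes: not forced into memory
     struct too_big { int a[5]; };   // 20 bytes: returned via memory
 */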
3002 #include "gt-xtensa.h"