1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright 2001,2002,2003 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "insn-attr.h"
35 #include "insn-codes.h"
36 #include "recog.h"
37 #include "output.h"
38 #include "tree.h"
39 #include "expr.h"
40 #include "flags.h"
41 #include "reload.h"
42 #include "tm_p.h"
43 #include "function.h"
44 #include "toplev.h"
45 #include "optabs.h"
46 #include "libfuncs.h"
47 #include "ggc.h"
48 #include "target.h"
49 #include "target-def.h"
50 #include "langhooks.h"
52 /* Enumeration for all of the relational tests, so that we can build
53 arrays indexed by the test type, and not worry about the order
54 of EQ, NE, etc. */
56 enum internal_test
58 ITEST_EQ,
59 ITEST_NE,
60 ITEST_GT,
61 ITEST_GE,
62 ITEST_LT,
63 ITEST_LE,
64 ITEST_GTU,
65 ITEST_GEU,
66 ITEST_LTU,
67 ITEST_LEU,
68 ITEST_MAX
71 /* Cached operands, and operator to compare for use in set/branch on
72 condition codes. */
73 rtx branch_cmp[2];
75 /* What type of branch to use.  */
76 enum cmp_type branch_type;
78 /* Array giving truth value on whether or not a given hard register
79 can support a given mode. */
80 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
82 /* Current frame size calculated by compute_frame_size. */
83 unsigned xtensa_current_frame_size;
85 /* Tables of ld/st opcode names for block moves */
86 const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
87 const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
88 #define LARGEST_MOVE_RATIO 15
90 /* Define the structure for the machine field in struct function. */
91 struct machine_function GTY(())
93 int accesses_prev_frame;
94 bool incoming_a7_copied;
97 /* Vector, indexed by hard register number, which contains 1 for a
98 register that is allowable in a candidate for leaf function
99 treatment. */
101 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
103 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
104 1, 1, 1,
105 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
109 /* Map hard register number to register class */
110 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
112 RL_REGS, SP_REG, RL_REGS, RL_REGS,
113 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
114 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
115 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
116 AR_REGS, AR_REGS, BR_REGS,
117 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
118 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
119 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
120 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
121 ACC_REG,
124 /* Map register constraint character to register class. */
125 enum reg_class xtensa_char_to_class[256] =
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
189 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
190 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
193 static int b4const_or_zero (int);
194 static enum internal_test map_test_to_internal_test (enum rtx_code);
195 static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
196 static rtx gen_float_relational (enum rtx_code, rtx, rtx);
197 static rtx gen_conditional_move (rtx);
198 static rtx fixup_subreg_mem (rtx);
199 static enum machine_mode xtensa_find_mode_for_size (unsigned);
200 static struct machine_function * xtensa_init_machine_status (void);
201 static void printx (FILE *, signed int);
202 static void xtensa_function_epilogue (FILE *, HOST_WIDE_INT);
203 static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
204 int) ATTRIBUTE_UNUSED;
205 static void xtensa_select_rtx_section (enum machine_mode, rtx,
206 unsigned HOST_WIDE_INT);
207 static bool xtensa_rtx_costs (rtx, int, int, int *);
208 static tree xtensa_build_builtin_va_list (void);
210 static int current_function_arg_words;
211 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
212 REG_ALLOC_ORDER;
215 /* This macro generates the assembly code for function exit,
216 on machines that need it. If FUNCTION_EPILOGUE is not defined
217 then individual return instructions are generated for each
218 return statement. Args are same as for FUNCTION_PROLOGUE. */
220 #undef TARGET_ASM_FUNCTION_EPILOGUE
221 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
223 /* These hooks specify assembly directives for creating certain kinds
224 of integer object. */
226 #undef TARGET_ASM_ALIGNED_SI_OP
227 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
229 #undef TARGET_ASM_SELECT_RTX_SECTION
230 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
232 #undef TARGET_RTX_COSTS
233 #define TARGET_RTX_COSTS xtensa_rtx_costs
234 #undef TARGET_ADDRESS_COST
235 #define TARGET_ADDRESS_COST hook_int_rtx_0
237 #undef TARGET_BUILD_BUILTIN_VA_LIST
238 #define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list
240 struct gcc_target targetm = TARGET_INITIALIZER;
244 /* Functions to test Xtensa immediate operand validity.  */
248 xtensa_b4constu (int v)
250 switch (v)
252 case 32768:
253 case 65536:
254 case 2:
255 case 3:
256 case 4:
257 case 5:
258 case 6:
259 case 7:
260 case 8:
261 case 10:
262 case 12:
263 case 16:
264 case 32:
265 case 64:
266 case 128:
267 case 256:
268 return 1;
270 return 0;
274 xtensa_simm8x256 (int v)
276 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
280 xtensa_ai4const (int v)
282 return (v == -1 || (v >= 1 && v <= 15));
286 xtensa_simm7 (int v)
288 return v >= -32 && v <= 95;
292 xtensa_b4const (int v)
294 switch (v)
296 case -1:
297 case 1:
298 case 2:
299 case 3:
300 case 4:
301 case 5:
302 case 6:
303 case 7:
304 case 8:
305 case 10:
306 case 12:
307 case 16:
308 case 32:
309 case 64:
310 case 128:
311 case 256:
312 return 1;
314 return 0;
318 xtensa_simm8 (int v)
320 return v >= -128 && v <= 127;
324 xtensa_tp7 (int v)
326 return (v >= 7 && v <= 22);
330 xtensa_lsi4x4 (int v)
332 return (v & 3) == 0 && (v >= 0 && v <= 60);
336 xtensa_simm12b (int v)
338 return v >= -2048 && v <= 2047;
342 xtensa_uimm8 (int v)
344 return v >= 0 && v <= 255;
348 xtensa_uimm8x2 (int v)
350 return (v & 1) == 0 && (v >= 0 && v <= 510);
354 xtensa_uimm8x4 (int v)
356 return (v & 3) == 0 && (v >= 0 && v <= 1020);
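/* As a quick reference, the predicates above correspond to these
   immediate fields (instruction names assume the standard Xtensa ISA
   encodings):

     xtensa_simm8      ADDI               -128 .. 127
     xtensa_simm8x256  ADDMI              -32768 .. 32512, multiples of 256
     xtensa_simm12b    MOVI               -2048 .. 2047
     xtensa_ai4const   ADDI.N             -1, 1 .. 15
     xtensa_uimm8      L8UI/S8I offset    0 .. 255
     xtensa_uimm8x2    L16UI/S16I offset  0 .. 510, multiples of 2
     xtensa_uimm8x4    L32I/S32I offset   0 .. 1020, multiples of 4
     xtensa_lsi4x4     L32I.N/S32I.N      0 .. 60, multiples of 4

   For example, xtensa_uimm8x4 (1020) and xtensa_simm8x256 (-32768) hold,
   while xtensa_uimm8x4 (1021) and xtensa_simm8x256 (100) do not.  */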
360 /* This is just like the standard true_regnum() function except that it
361 works even when reg_renumber is not initialized. */
364 xt_true_regnum (rtx x)
366 if (GET_CODE (x) == REG)
368 if (reg_renumber
369 && REGNO (x) >= FIRST_PSEUDO_REGISTER
370 && reg_renumber[REGNO (x)] >= 0)
371 return reg_renumber[REGNO (x)];
372 return REGNO (x);
374 if (GET_CODE (x) == SUBREG)
376 int base = xt_true_regnum (SUBREG_REG (x));
377 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
378 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
379 GET_MODE (SUBREG_REG (x)),
380 SUBREG_BYTE (x), GET_MODE (x));
382 return -1;
387 add_operand (rtx op, enum machine_mode mode)
389 if (GET_CODE (op) == CONST_INT)
390 return (xtensa_simm8 (INTVAL (op)) || xtensa_simm8x256 (INTVAL (op)));
392 return register_operand (op, mode);
397 arith_operand (rtx op, enum machine_mode mode)
399 if (GET_CODE (op) == CONST_INT)
400 return xtensa_simm8 (INTVAL (op));
402 return register_operand (op, mode);
407 nonimmed_operand (rtx op, enum machine_mode mode)
409 /* We cannot use the standard nonimmediate_operand() predicate because
410 it includes constant pool memory operands. */
412 if (memory_operand (op, mode))
413 return !constantpool_address_p (XEXP (op, 0));
415 return register_operand (op, mode);
420 mem_operand (rtx op, enum machine_mode mode)
422 /* We cannot use the standard memory_operand() predicate because
423 it includes constant pool memory operands. */
425 if (memory_operand (op, mode))
426 return !constantpool_address_p (XEXP (op, 0));
428 return FALSE;
433 xtensa_valid_move (enum machine_mode mode, rtx *operands)
435 /* Either the destination or source must be a register, and the
436 MAC16 accumulator doesn't count. */
438 if (register_operand (operands[0], mode))
440 int dst_regnum = xt_true_regnum (operands[0]);
442 /* The stack pointer can only be assigned with a MOVSP opcode. */
443 if (dst_regnum == STACK_POINTER_REGNUM)
444 return (mode == SImode
445 && register_operand (operands[1], mode)
446 && !ACC_REG_P (xt_true_regnum (operands[1])));
448 if (!ACC_REG_P (dst_regnum))
449 return true;
451 if (register_operand (operands[1], mode))
453 int src_regnum = xt_true_regnum (operands[1]);
454 if (!ACC_REG_P (src_regnum))
455 return true;
457 return FALSE;
462 mask_operand (rtx op, enum machine_mode mode)
464 if (GET_CODE (op) == CONST_INT)
465 return xtensa_mask_immediate (INTVAL (op));
467 return register_operand (op, mode);
472 extui_fldsz_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
474 return ((GET_CODE (op) == CONST_INT)
475 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
480 sext_operand (rtx op, enum machine_mode mode)
482 if (TARGET_SEXT)
483 return nonimmed_operand (op, mode);
484 return mem_operand (op, mode);
489 sext_fldsz_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
491 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
496 lsbitnum_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
498 if (GET_CODE (op) == CONST_INT)
500 return (BITS_BIG_ENDIAN
501 ? (INTVAL (op) == BITS_PER_WORD-1)
502 : (INTVAL (op) == 0));
504 return FALSE;
508 static int
509 b4const_or_zero (int v)
511 if (v == 0)
512 return TRUE;
513 return xtensa_b4const (v);
518 branch_operand (rtx op, enum machine_mode mode)
520 if (GET_CODE (op) == CONST_INT)
521 return b4const_or_zero (INTVAL (op));
523 return register_operand (op, mode);
528 ubranch_operand (rtx op, enum machine_mode mode)
530 if (GET_CODE (op) == CONST_INT)
531 return xtensa_b4constu (INTVAL (op));
533 return register_operand (op, mode);
538 call_insn_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
540 if ((GET_CODE (op) == REG)
541 && (op != arg_pointer_rtx)
542 && ((REGNO (op) < FRAME_POINTER_REGNUM)
543 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
544 return TRUE;
546 if (CONSTANT_ADDRESS_P (op))
548 /* With PIC, direct calls are only allowed to static (local) functions.  */
549 if (flag_pic)
551 tree callee, callee_sec, caller_sec;
553 if (GET_CODE (op) != SYMBOL_REF || !SYMBOL_REF_LOCAL_P (op))
554 return FALSE;
556 /* Don't attempt a direct call if the callee is known to be in
557 a different section, since there's a good chance it will be
558 out of range. */
560 if (flag_function_sections
561 || DECL_ONE_ONLY (current_function_decl))
562 return FALSE;
563 caller_sec = DECL_SECTION_NAME (current_function_decl);
564 callee = SYMBOL_REF_DECL (op);
565 if (callee)
567 if (DECL_ONE_ONLY (callee))
568 return FALSE;
569 callee_sec = DECL_SECTION_NAME (callee);
570 if (((caller_sec == NULL_TREE) ^ (callee_sec == NULL_TREE))
571 || (caller_sec != NULL_TREE
572 && strcmp (TREE_STRING_POINTER (caller_sec),
573 TREE_STRING_POINTER (callee_sec)) != 0))
574 return FALSE;
576 else if (caller_sec != NULL_TREE)
577 return FALSE;
579 return TRUE;
582 return FALSE;
587 move_operand (rtx op, enum machine_mode mode)
589 if (register_operand (op, mode)
590 || memory_operand (op, mode))
591 return TRUE;
593 switch (mode)
595 case DFmode:
596 case SFmode:
597 return TARGET_CONST16 && CONSTANT_P (op);
599 case DImode:
600 case SImode:
601 if (TARGET_CONST16)
602 return CONSTANT_P (op);
603 /* Fall through. */
605 case HImode:
606 case QImode:
607 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
608 result in 0/1. */
609 if (GET_CODE (op) == CONSTANT_P_RTX)
610 return TRUE;
612 if (GET_CODE (op) == CONST_INT && xtensa_simm12b (INTVAL (op)))
613 return TRUE;
614 break;
616 default:
617 break;
620 return FALSE;
625 smalloffset_mem_p (rtx op)
627 if (GET_CODE (op) == MEM)
629 rtx addr = XEXP (op, 0);
630 if (GET_CODE (addr) == REG)
631 return REG_OK_FOR_BASE_P (addr);
632 if (GET_CODE (addr) == PLUS)
634 rtx offset = XEXP (addr, 0);
635 if (GET_CODE (offset) != CONST_INT)
636 offset = XEXP (addr, 1);
637 if (GET_CODE (offset) != CONST_INT)
638 return FALSE;
639 return xtensa_lsi4x4 (INTVAL (offset));
642 return FALSE;
647 constantpool_address_p (rtx addr)
649 rtx sym = addr;
651 if (GET_CODE (addr) == CONST)
653 rtx offset;
655 /* Only handle (PLUS (SYM, OFFSET)) form. */
656 addr = XEXP (addr, 0);
657 if (GET_CODE (addr) != PLUS)
658 return FALSE;
660 /* Make sure the address is word aligned. */
661 offset = XEXP (addr, 1);
662 if ((GET_CODE (offset) != CONST_INT)
663 || ((INTVAL (offset) & 3) != 0))
664 return FALSE;
666 sym = XEXP (addr, 0);
669 if ((GET_CODE (sym) == SYMBOL_REF)
670 && CONSTANT_POOL_ADDRESS_P (sym))
671 return TRUE;
672 return FALSE;
677 constantpool_mem_p (rtx op)
679 if (GET_CODE (op) == MEM)
680 return constantpool_address_p (XEXP (op, 0));
681 return FALSE;
685 /* Accept the floating point constant 1 in the appropriate mode. */
688 const_float_1_operand (rtx op, enum machine_mode mode)
690 REAL_VALUE_TYPE d;
691 static REAL_VALUE_TYPE onedf;
692 static REAL_VALUE_TYPE onesf;
693 static int one_initialized;
695 if ((GET_CODE (op) != CONST_DOUBLE)
696 || (mode != GET_MODE (op))
697 || (mode != DFmode && mode != SFmode))
698 return FALSE;
700 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
702 if (! one_initialized)
704 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
705 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
706 one_initialized = TRUE;
709 if (mode == DFmode)
710 return REAL_VALUES_EQUAL (d, onedf);
711 else
712 return REAL_VALUES_EQUAL (d, onesf);
717 fpmem_offset_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
719 if (GET_CODE (op) == CONST_INT)
720 return xtensa_mem_offset (INTVAL (op), SFmode);
721 return 0;
725 void
726 xtensa_extend_reg (rtx dst, rtx src)
728 rtx temp = gen_reg_rtx (SImode);
729 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
731 /* Generate paradoxical subregs as needed so that the modes match. */
732 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
733 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
735 emit_insn (gen_ashlsi3 (temp, src, shift));
736 emit_insn (gen_ashrsi3 (dst, temp, shift));
741 branch_operator (rtx x, enum machine_mode mode)
743 if (GET_MODE (x) != mode)
744 return FALSE;
746 switch (GET_CODE (x))
748 case EQ:
749 case NE:
750 case LT:
751 case GE:
752 return TRUE;
753 default:
754 break;
756 return FALSE;
761 ubranch_operator (rtx x, enum machine_mode mode)
763 if (GET_MODE (x) != mode)
764 return FALSE;
766 switch (GET_CODE (x))
768 case LTU:
769 case GEU:
770 return TRUE;
771 default:
772 break;
774 return FALSE;
779 boolean_operator (rtx x, enum machine_mode mode)
781 if (GET_MODE (x) != mode)
782 return FALSE;
784 switch (GET_CODE (x))
786 case EQ:
787 case NE:
788 return TRUE;
789 default:
790 break;
792 return FALSE;
797 xtensa_mask_immediate (int v)
799 #define MAX_MASK_SIZE 16
800 int mask_size;
802 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
804 if ((v & 1) == 0)
805 return FALSE;
806 v = v >> 1;
807 if (v == 0)
808 return TRUE;
811 return FALSE;
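/* For example, xtensa_mask_immediate accepts 0x1, 0x3, 0xff and 0xffff
   (contiguous low-order masks of 1 to 16 bits) but rejects 0, 0xff00 and
   0x1ffff; these are exactly the masks whose width fits the EXTUI field
   size handled by extui_fldsz_operand above.  */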
816 xtensa_mem_offset (unsigned v, enum machine_mode mode)
818 switch (mode)
820 case BLKmode:
821 /* Handle the worst case for block moves. See xtensa_expand_block_move
822 where we emit an optimized block move operation if the block can be
823 moved in < "move_ratio" pieces. The worst case is when the block is
824 aligned but has a size of (3 mod 4) (does this happen?) so that the
825 last piece requires a byte load/store. */
826 return (xtensa_uimm8 (v)
827 && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
829 case QImode:
830 return xtensa_uimm8 (v);
832 case HImode:
833 return xtensa_uimm8x2 (v);
835 case DFmode:
836 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
838 default:
839 break;
842 return xtensa_uimm8x4 (v);
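/* Worked example for the BLKmode case: assuming MOVE_MAX is 4, the
   worst-case block move spans MOVE_MAX * LARGEST_MOVE_RATIO = 60 bytes,
   so the offset V is valid only if both V and V + 60 are valid unsigned
   8-bit offsets, i.e. 0 <= V <= 195.  */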
846 /* Make normal rtx_code into something we can index from an array. */
848 static enum internal_test
849 map_test_to_internal_test (enum rtx_code test_code)
851 enum internal_test test = ITEST_MAX;
853 switch (test_code)
855 default: break;
856 case EQ: test = ITEST_EQ; break;
857 case NE: test = ITEST_NE; break;
858 case GT: test = ITEST_GT; break;
859 case GE: test = ITEST_GE; break;
860 case LT: test = ITEST_LT; break;
861 case LE: test = ITEST_LE; break;
862 case GTU: test = ITEST_GTU; break;
863 case GEU: test = ITEST_GEU; break;
864 case LTU: test = ITEST_LTU; break;
865 case LEU: test = ITEST_LEU; break;
868 return test;
872 /* Generate the code to compare two integer values. The return value is
873 the comparison expression. */
875 static rtx
876 gen_int_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
877 rtx cmp0, /* first operand to compare */
878 rtx cmp1, /* second operand to compare */
879 int *p_invert /* whether branch needs to reverse test */)
881 struct cmp_info
883 enum rtx_code test_code; /* test code to use in insn */
884 int (*const_range_p) (int); /* predicate function to check range */
885 int const_add; /* constant to add (convert LE -> LT) */
886 int reverse_regs; /* reverse registers in test */
887 int invert_const; /* != 0 if invert value if cmp1 is constant */
888 int invert_reg; /* != 0 if invert value if cmp1 is register */
889 int unsignedp; /* != 0 for unsigned comparisons. */
892 static struct cmp_info info[ (int)ITEST_MAX ] = {
894 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
895 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
897 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
898 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
899 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
900 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
902 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
903 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
904 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
905 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
908 enum internal_test test;
909 enum machine_mode mode;
910 struct cmp_info *p_info;
912 test = map_test_to_internal_test (test_code);
913 if (test == ITEST_MAX)
914 abort ();
916 p_info = &info[ (int)test ];
918 mode = GET_MODE (cmp0);
919 if (mode == VOIDmode)
920 mode = GET_MODE (cmp1);
922 /* Make sure we can handle any constants given to us. */
923 if (GET_CODE (cmp1) == CONST_INT)
925 HOST_WIDE_INT value = INTVAL (cmp1);
926 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
928 /* If the immediate overflows or does not fit in the immediate field,
929 spill it to a register.  */
931 if ((p_info->unsignedp ?
932 (uvalue + p_info->const_add > uvalue) :
933 (value + p_info->const_add > value)) != (p_info->const_add > 0))
935 cmp1 = force_reg (mode, cmp1);
937 else if (!(p_info->const_range_p) (value + p_info->const_add))
939 cmp1 = force_reg (mode, cmp1);
942 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
944 cmp1 = force_reg (mode, cmp1);
947 /* See if we need to invert the result. */
948 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
949 ? p_info->invert_const
950 : p_info->invert_reg);
952 /* Comparison to constants may involve adding 1 to change a LT into an LE.
953 Comparison between two registers may involve switching operands.  */
954 if (GET_CODE (cmp1) == CONST_INT)
956 if (p_info->const_add != 0)
957 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
960 else if (p_info->reverse_regs)
962 rtx temp = cmp0;
963 cmp0 = cmp1;
964 cmp1 = temp;
967 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
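/* Worked example of the transformation above: for (GT a 4) the GT entry
   selects an LT test with const_add = 1 and invert_const set, so cmp1
   becomes 5, the generated test is (LT a 5), and *p_invert tells the
   caller to branch on the inverted result, which is equivalent to a > 4.
   For (GT a b) with two registers, the operands are swapped instead,
   giving (LT b a) with no inversion.  */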
971 /* Generate the code to compare two float values. The return value is
972 the comparison expression. */
974 static rtx
975 gen_float_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
976 rtx cmp0, /* first operand to compare */
977 rtx cmp1 /* second operand to compare */)
979 rtx (*gen_fn) (rtx, rtx, rtx);
980 rtx brtmp;
981 int reverse_regs, invert;
983 switch (test_code)
985 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
986 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
987 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
988 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
989 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
990 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
991 default:
992 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
993 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
996 if (reverse_regs)
998 rtx temp = cmp0;
999 cmp0 = cmp1;
1000 cmp1 = temp;
1003 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1004 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1006 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
1010 void
1011 xtensa_expand_conditional_branch (rtx *operands, enum rtx_code test_code)
1013 enum cmp_type type = branch_type;
1014 rtx cmp0 = branch_cmp[0];
1015 rtx cmp1 = branch_cmp[1];
1016 rtx cmp;
1017 int invert;
1018 rtx label1, label2;
1020 switch (type)
1022 case CMP_DF:
1023 default:
1024 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1026 case CMP_SI:
1027 invert = FALSE;
1028 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1029 break;
1031 case CMP_SF:
1032 if (!TARGET_HARD_FLOAT)
1033 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1034 invert = FALSE;
1035 cmp = gen_float_relational (test_code, cmp0, cmp1);
1036 break;
1039 /* Generate the branch. */
1041 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1042 label2 = pc_rtx;
1044 if (invert)
1046 label2 = label1;
1047 label1 = pc_rtx;
1050 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1051 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
1052 label1,
1053 label2)));
1057 static rtx
1058 gen_conditional_move (rtx cmp)
1060 enum rtx_code code = GET_CODE (cmp);
1061 rtx op0 = branch_cmp[0];
1062 rtx op1 = branch_cmp[1];
1064 if (branch_type == CMP_SI)
1066 /* Jump optimization calls get_condition() which canonicalizes
1067 comparisons like (GE x <const>) to (GT x <const-1>).
1068 Transform those comparisons back to GE, since that is the
1069 comparison supported in Xtensa. We shouldn't have to
1070 transform <LE x const> comparisons, because neither
1071 xtensa_expand_conditional_branch() nor get_condition() will
1072 produce them. */
1074 if ((code == GT) && (op1 == constm1_rtx))
1076 code = GE;
1077 op1 = const0_rtx;
1079 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1081 if (boolean_operator (cmp, VOIDmode))
1083 /* Swap the operands to make const0 second. */
1084 if (op0 == const0_rtx)
1086 op0 = op1;
1087 op1 = const0_rtx;
1090 /* If not comparing against zero, emit a comparison (subtract). */
1091 if (op1 != const0_rtx)
1093 op0 = expand_binop (SImode, sub_optab, op0, op1,
1094 0, 0, OPTAB_LIB_WIDEN);
1095 op1 = const0_rtx;
1098 else if (branch_operator (cmp, VOIDmode))
1100 /* Swap the operands to make const0 second. */
1101 if (op0 == const0_rtx)
1103 op0 = op1;
1104 op1 = const0_rtx;
1106 switch (code)
1108 case LT: code = GE; break;
1109 case GE: code = LT; break;
1110 default: abort ();
1114 if (op1 != const0_rtx)
1115 return 0;
1117 else
1118 return 0;
1120 return gen_rtx (code, VOIDmode, op0, op1);
1123 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1124 return gen_float_relational (code, op0, op1);
1126 return 0;
1131 xtensa_expand_conditional_move (rtx *operands, int isflt)
1133 rtx cmp;
1134 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
1136 if (!(cmp = gen_conditional_move (operands[1])))
1137 return 0;
1139 if (isflt)
1140 gen_fn = (branch_type == CMP_SI
1141 ? gen_movsfcc_internal0
1142 : gen_movsfcc_internal1);
1143 else
1144 gen_fn = (branch_type == CMP_SI
1145 ? gen_movsicc_internal0
1146 : gen_movsicc_internal1);
1148 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1149 operands[2], operands[3], cmp));
1150 return 1;
1155 xtensa_expand_scc (rtx *operands)
1157 rtx dest = operands[0];
1158 rtx cmp = operands[1];
1159 rtx one_tmp, zero_tmp;
1160 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
1162 if (!(cmp = gen_conditional_move (cmp)))
1163 return 0;
1165 one_tmp = gen_reg_rtx (SImode);
1166 zero_tmp = gen_reg_rtx (SImode);
1167 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1168 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1170 gen_fn = (branch_type == CMP_SI
1171 ? gen_movsicc_internal0
1172 : gen_movsicc_internal1);
1173 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1174 return 1;
1178 /* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1]. MODE is
1179 for the output, i.e., the input operands are twice as big as MODE. */
1181 void
1182 xtensa_split_operand_pair (rtx operands[4], enum machine_mode mode)
1184 switch (GET_CODE (operands[1]))
1186 case REG:
1187 operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1);
1188 operands[2] = gen_rtx_REG (mode, REGNO (operands[1]));
1189 break;
1191 case MEM:
1192 operands[3] = adjust_address (operands[1], mode, GET_MODE_SIZE (mode));
1193 operands[2] = adjust_address (operands[1], mode, 0);
1194 break;
1196 case CONST_INT:
1197 case CONST_DOUBLE:
1198 split_double (operands[1], &operands[2], &operands[3]);
1199 break;
1201 default:
1202 abort ();
1205 switch (GET_CODE (operands[0]))
1207 case REG:
1208 operands[1] = gen_rtx_REG (mode, REGNO (operands[0]) + 1);
1209 operands[0] = gen_rtx_REG (mode, REGNO (operands[0]));
1210 break;
1212 case MEM:
1213 operands[1] = adjust_address (operands[0], mode, GET_MODE_SIZE (mode));
1214 operands[0] = adjust_address (operands[0], mode, 0);
1215 break;
1217 default:
1218 abort ();
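/* For instance, splitting a DImode move with SImode outputs turns a
   source of (reg:DI a2) into operands[2] = (reg:SI a2) and
   operands[3] = (reg:SI a3), and a MEM destination into two adjusted
   SImode MEMs at offsets 0 and 4 (an illustrative case only).  */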
1223 /* Emit insns to move operands[1] into operands[0].
1224 Return 1 if we have written out everything that needs to be done to
1225 do the move. Otherwise, return 0 and the caller will emit the move
1226 normally. */
1229 xtensa_emit_move_sequence (rtx *operands, enum machine_mode mode)
1231 if (CONSTANT_P (operands[1])
1232 && GET_CODE (operands[1]) != CONSTANT_P_RTX
1233 && (GET_CODE (operands[1]) != CONST_INT
1234 || !xtensa_simm12b (INTVAL (operands[1]))))
1236 if (!TARGET_CONST16)
1237 operands[1] = force_const_mem (SImode, operands[1]);
1239 /* PC-relative loads are always SImode, and CONST16 is only
1240 supported in the movsi pattern, so add a SUBREG for any other
1241 (smaller) mode. */
1243 if (mode != SImode)
1245 if (register_operand (operands[0], mode))
1247 operands[0] = simplify_gen_subreg (SImode, operands[0], mode, 0);
1248 emit_move_insn (operands[0], operands[1]);
1249 return 1;
1251 else
1253 operands[1] = force_reg (SImode, operands[1]);
1254 operands[1] = gen_lowpart_SUBREG (mode, operands[1]);
1259 if (!(reload_in_progress | reload_completed))
1261 if (!xtensa_valid_move (mode, operands))
1262 operands[1] = force_reg (mode, operands[1]);
1264 if (xtensa_copy_incoming_a7 (operands, mode))
1265 return 1;
1268 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1269 instruction won't be recognized after reload, so we remove the
1270 subreg and adjust mem accordingly. */
1271 if (reload_in_progress)
1273 operands[0] = fixup_subreg_mem (operands[0]);
1274 operands[1] = fixup_subreg_mem (operands[1]);
1276 return 0;
1280 static rtx
1281 fixup_subreg_mem (rtx x)
1283 if (GET_CODE (x) == SUBREG
1284 && GET_CODE (SUBREG_REG (x)) == REG
1285 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1287 rtx temp =
1288 gen_rtx_SUBREG (GET_MODE (x),
1289 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1290 SUBREG_BYTE (x));
1291 x = alter_subreg (&temp);
1293 return x;
1297 /* Check if this move is copying an incoming argument in a7. If so,
1298 emit the move, followed by the special "set_frame_ptr"
1299 unspec_volatile insn, at the very beginning of the function. This
1300 is necessary because the register allocator will ignore conflicts
1301 with a7 and may assign some other pseudo to a7. If that pseudo was
1302 assigned prior to this move, it would clobber the incoming argument
1303 in a7. By copying the argument out of a7 as the very first thing,
1304 and then immediately following that with an unspec_volatile to keep
1305 the scheduler away, we should avoid any problems. */
1307 bool
1308 xtensa_copy_incoming_a7 (rtx *operands, enum machine_mode mode)
1310 if (a7_overlap_mentioned_p (operands[1])
1311 && !cfun->machine->incoming_a7_copied)
1313 rtx mov;
1314 switch (mode)
1316 case DFmode:
1317 mov = gen_movdf_internal (operands[0], operands[1]);
1318 break;
1319 case SFmode:
1320 mov = gen_movsf_internal (operands[0], operands[1]);
1321 break;
1322 case DImode:
1323 mov = gen_movdi_internal (operands[0], operands[1]);
1324 break;
1325 case SImode:
1326 mov = gen_movsi_internal (operands[0], operands[1]);
1327 break;
1328 case HImode:
1329 mov = gen_movhi_internal (operands[0], operands[1]);
1330 break;
1331 case QImode:
1332 mov = gen_movqi_internal (operands[0], operands[1]);
1333 break;
1334 default:
1335 abort ();
1338 /* Insert the instructions before any other argument copies.
1339 (The set_frame_ptr insn comes _after_ the move, so push it
1340 out first.) */
1341 push_topmost_sequence ();
1342 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1343 emit_insn_after (mov, get_insns ());
1344 pop_topmost_sequence ();
1346 /* Ideally the incoming argument in a7 would only be copied
1347 once, since propagating a7 into the body of a function
1348 will almost certainly lead to errors. However, there is
1349 at least one harmless case (in GCSE) where the original
1350 copy from a7 is changed to copy into a new pseudo. Thus,
1351 we use a flag to only do this special treatment for the
1352 first copy of a7. */
1354 cfun->machine->incoming_a7_copied = true;
1356 return 1;
1359 return 0;
1363 /* Try to expand a block move operation to an RTL block move instruction.
1364 If not optimizing or if the block size is not a constant or if the
1365 block is small, the expansion fails and GCC falls back to calling
1366 memcpy().
1368 operands[0] is the destination
1369 operands[1] is the source
1370 operands[2] is the length
1371 operands[3] is the alignment */
1374 xtensa_expand_block_move (rtx *operands)
1376 rtx dest = operands[0];
1377 rtx src = operands[1];
1378 int bytes = INTVAL (operands[2]);
1379 int align = XINT (operands[3], 0);
1380 int num_pieces, move_ratio;
1382 /* If this is not a fixed size move, just call memcpy. */
1383 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1384 return 0;
1386 /* Anything to move? */
1387 if (bytes <= 0)
1388 return 1;
1390 if (align > MOVE_MAX)
1391 align = MOVE_MAX;
1393 /* Decide whether to expand inline based on the optimization level. */
1394 move_ratio = 4;
1395 if (optimize > 2)
1396 move_ratio = LARGEST_MOVE_RATIO;
1397 num_pieces = (bytes / align) + (bytes % align); /* Close enough anyway. */
1398 if (num_pieces >= move_ratio)
1399 return 0;
1401 /* Make sure the memory addresses are valid. */
1402 operands[0] = validize_mem (dest);
1403 operands[1] = validize_mem (src);
1405 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1406 operands[2], operands[3]));
1407 return 1;
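/* Example of the heuristic above: a fixed 11-byte copy with 4-byte
   alignment gives num_pieces = 11/4 + 11%4 = 5, so with the default
   move_ratio of 4 the expander falls back to memcpy(), while at
   optimization levels above 2 (move_ratio = LARGEST_MOVE_RATIO = 15)
   it emits the inline movstrsi_internal sequence instead.  */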
1411 /* Emit a sequence of instructions to implement a block move, trying
1412 to hide load delay slots as much as possible. Load N values into
1413 temporary registers, store those N values, and repeat until the
1414 complete block has been moved. N=delay_slots+1. */
1416 struct meminsnbuf
1418 char template[30];
1419 rtx operands[2];
1422 void
1423 xtensa_emit_block_move (rtx *operands, rtx *tmpregs, int delay_slots)
1425 rtx dest = operands[0];
1426 rtx src = operands[1];
1427 int bytes = INTVAL (operands[2]);
1428 int align = XINT (operands[3], 0);
1429 rtx from_addr = XEXP (src, 0);
1430 rtx to_addr = XEXP (dest, 0);
1431 int from_struct = MEM_IN_STRUCT_P (src);
1432 int to_struct = MEM_IN_STRUCT_P (dest);
1433 int offset = 0;
1434 int chunk_size, item_size;
1435 struct meminsnbuf *ldinsns, *stinsns;
1436 const char *ldname, *stname;
1437 enum machine_mode mode;
1439 if (align > MOVE_MAX)
1440 align = MOVE_MAX;
1441 item_size = align;
1442 chunk_size = delay_slots + 1;
1444 ldinsns = (struct meminsnbuf *)
1445 alloca (chunk_size * sizeof (struct meminsnbuf));
1446 stinsns = (struct meminsnbuf *)
1447 alloca (chunk_size * sizeof (struct meminsnbuf));
1449 mode = xtensa_find_mode_for_size (item_size);
1450 item_size = GET_MODE_SIZE (mode);
1451 ldname = xtensa_ld_opcodes[(int) mode];
1452 stname = xtensa_st_opcodes[(int) mode];
1454 while (bytes > 0)
1456 int n;
1458 for (n = 0; n < chunk_size; n++)
1460 rtx addr, mem;
1462 if (bytes == 0)
1464 chunk_size = n;
1465 break;
1468 if (bytes < item_size)
1470 /* Find a smaller item_size which we can load & store. */
1471 item_size = bytes;
1472 mode = xtensa_find_mode_for_size (item_size);
1473 item_size = GET_MODE_SIZE (mode);
1474 ldname = xtensa_ld_opcodes[(int) mode];
1475 stname = xtensa_st_opcodes[(int) mode];
1478 /* Record the load instruction opcode and operands. */
1479 addr = plus_constant (from_addr, offset);
1480 mem = gen_rtx_MEM (mode, addr);
1481 if (! memory_address_p (mode, addr))
1482 abort ();
1483 MEM_IN_STRUCT_P (mem) = from_struct;
1484 ldinsns[n].operands[0] = tmpregs[n];
1485 ldinsns[n].operands[1] = mem;
1486 sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1488 /* Record the store instruction opcode and operands. */
1489 addr = plus_constant (to_addr, offset);
1490 mem = gen_rtx_MEM (mode, addr);
1491 if (! memory_address_p (mode, addr))
1492 abort ();
1493 MEM_IN_STRUCT_P (mem) = to_struct;
1494 stinsns[n].operands[0] = tmpregs[n];
1495 stinsns[n].operands[1] = mem;
1496 sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1498 offset += item_size;
1499 bytes -= item_size;
1502 /* Now output the loads followed by the stores. */
1503 for (n = 0; n < chunk_size; n++)
1504 output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1505 for (n = 0; n < chunk_size; n++)
1506 output_asm_insn (stinsns[n].template, stinsns[n].operands);
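/* With one load delay slot and a word-aligned 8-byte block, the loop
   above emits roughly the following (register names are illustrative):

       l32i    a8, a3, 0
       l32i    a9, a3, 4
       s32i    a8, a2, 0
       s32i    a9, a2, 4

   grouping the loads ahead of the stores to hide the load latency.  */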
1511 static enum machine_mode
1512 xtensa_find_mode_for_size (unsigned item_size)
1514 enum machine_mode mode, tmode;
1516 while (1)
1518 mode = VOIDmode;
1520 /* Find mode closest to but not bigger than item_size. */
1521 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1522 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1523 if (GET_MODE_SIZE (tmode) <= item_size)
1524 mode = tmode;
1525 if (mode == VOIDmode)
1526 abort ();
1528 item_size = GET_MODE_SIZE (mode);
1530 if (xtensa_ld_opcodes[(int) mode]
1531 && xtensa_st_opcodes[(int) mode])
1532 break;
1534 /* Cannot load & store this mode; try something smaller. */
1535 item_size -= 1;
1538 return mode;
1542 void
1543 xtensa_expand_nonlocal_goto (rtx *operands)
1545 rtx goto_handler = operands[1];
1546 rtx containing_fp = operands[3];
1548 /* Generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1549 is too big to generate in-line. */
1551 if (GET_CODE (containing_fp) != REG)
1552 containing_fp = force_reg (Pmode, containing_fp);
1554 goto_handler = replace_rtx (copy_rtx (goto_handler),
1555 virtual_stack_vars_rtx,
1556 containing_fp);
1558 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1559 0, VOIDmode, 2,
1560 containing_fp, Pmode,
1561 goto_handler, Pmode);
1565 static struct machine_function *
1566 xtensa_init_machine_status (void)
1568 return ggc_alloc_cleared (sizeof (struct machine_function));
1572 void
1573 xtensa_setup_frame_addresses (void)
1575 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1576 cfun->machine->accesses_prev_frame = 1;
1578 emit_library_call
1579 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1580 0, VOIDmode, 0);
1584 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1585 a comment showing where the end of the loop is. However, if there is a
1586 label or a branch at the end of the loop then we need to place a nop
1587 there. If the loop ends with a label we need the nop so that branches
1588 targeting that label will target the nop (and thus remain in the loop),
1589 instead of targeting the instruction after the loop (and thus exiting
1590 the loop). If the loop ends with a branch, we need the nop in case the
1591 branch is targeting a location inside the loop. When the branch
1592 executes it will cause the loop count to be decremented even if it is
1593 taken (because it is the last instruction in the loop), so we need to
1594 nop after the branch to prevent the loop count from being decremented
1595 when the branch is taken. */
1597 void
1598 xtensa_emit_loop_end (rtx insn, rtx *operands)
1600 char done = 0;
1602 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1604 switch (GET_CODE (insn))
1606 case NOTE:
1607 case BARRIER:
1608 break;
1610 case CODE_LABEL:
1611 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1612 done = 1;
1613 break;
1615 default:
1617 rtx body = PATTERN (insn);
1619 if (GET_CODE (body) == JUMP_INSN)
1621 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1622 done = 1;
1624 else if ((GET_CODE (body) != USE)
1625 && (GET_CODE (body) != CLOBBER))
1626 done = 1;
1628 break;
1632 output_asm_insn ("# loop end for %0", operands);
1636 char *
1637 xtensa_emit_call (int callop, rtx *operands)
1639 static char result[64];
1640 rtx tgt = operands[callop];
1642 if (GET_CODE (tgt) == CONST_INT)
1643 sprintf (result, "call8\t0x%lx", INTVAL (tgt));
1644 else if (register_operand (tgt, VOIDmode))
1645 sprintf (result, "callx8\t%%%d", callop);
1646 else
1647 sprintf (result, "call8\t%%%d", callop);
1649 return result;
1653 /* Return the debugger register number to use for 'regno'. */
1656 xtensa_dbx_register_number (int regno)
1658 int first = -1;
1660 if (GP_REG_P (regno))
1662 regno -= GP_REG_FIRST;
1663 first = 0;
1665 else if (BR_REG_P (regno))
1667 regno -= BR_REG_FIRST;
1668 first = 16;
1670 else if (FP_REG_P (regno))
1672 regno -= FP_REG_FIRST;
1673 first = 48;
1675 else if (ACC_REG_P (regno))
1677 first = 0x200; /* Start of Xtensa special registers. */
1678 regno = 16; /* ACCLO is special register 16. */
1681 /* When optimizing, we sometimes get asked about pseudo-registers
1682 that don't represent hard registers. Return 0 for these. */
1683 if (first == -1)
1684 return 0;
1686 return first + regno;
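/* Under this numbering, a0-a15 map to debugger registers 0-15, the
   boolean registers b0-b15 to 16-31, the floating-point registers
   f0-f15 to 48-63, and the MAC16 accumulator to 0x210 (special
   register 16 in the 0x200 range).  */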
1690 /* Argument support functions. */
1692 /* Initialize CUMULATIVE_ARGS for a function. */
1694 void
1695 init_cumulative_args (CUMULATIVE_ARGS *cum,
1696 tree fntype ATTRIBUTE_UNUSED,
1697 rtx libname ATTRIBUTE_UNUSED)
1699 cum->arg_words = 0;
1703 /* Advance the argument to the next argument position. */
1705 void
1706 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type)
1708 int words, max;
1709 int *arg_words;
1711 arg_words = &cum->arg_words;
1712 max = MAX_ARGS_IN_REGISTERS;
1714 words = (((mode != BLKmode)
1715 ? (int) GET_MODE_SIZE (mode)
1716 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1718 if ((*arg_words + words > max) && (*arg_words < max))
1719 *arg_words = max;
1721 *arg_words += words;
1725 /* Return an RTL expression containing the register for the given mode,
1726 or 0 if the argument is to be passed on the stack. INCOMING_P is nonzero
1727 if this is an incoming argument to the current function. */
1730 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1731 int incoming_p)
1733 int regbase, words, max;
1734 int *arg_words;
1735 int regno;
1736 enum machine_mode result_mode;
1738 arg_words = &cum->arg_words;
1739 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1740 max = MAX_ARGS_IN_REGISTERS;
1742 words = (((mode != BLKmode)
1743 ? (int) GET_MODE_SIZE (mode)
1744 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1746 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1747 *arg_words += (*arg_words & 1);
1749 if (*arg_words + words > max)
1750 return (rtx)0;
1752 regno = regbase + *arg_words;
1753 result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);
1755 /* We need to make sure that references to a7 are represented with
1756 rtx that is not equal to hard_frame_pointer_rtx. For multi-word
1757 modes for which we don't define move patterns, we can't control
1758 the expansion unless we explicitly list the individual registers
1759 in a PARALLEL. */
1761 if (mode != DImode && mode != DFmode
1762 && regno < A7_REG
1763 && regno + words > A7_REG)
1765 rtx result;
1766 int n;
1768 result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
1769 for (n = 0; n < words; n++)
1771 XVECEXP (result, 0, n) =
1772 gen_rtx_EXPR_LIST (VOIDmode,
1773 gen_raw_REG (SImode, regno + n),
1774 GEN_INT (n * UNITS_PER_WORD));
1776 return result;
1779 return gen_raw_REG (result_mode, regno);
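/* Two illustrative cases for function_arg: a type aligned to more than
   one word first rounds *arg_words up to an even word so that it starts
   in an aligned register pair; and a two-word BLKmode aggregate whose
   first word would land in a6 straddles A7_REG, so it is described by
   an explicit PARALLEL of (reg:SI a6) and (reg:SI a7) rather than a
   single multi-word REG, keeping the a7 reference distinct from
   hard_frame_pointer_rtx.  */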
1783 void
1784 override_options (void)
1786 int regno;
1787 enum machine_mode mode;
1789 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1790 error ("boolean registers required for the floating-point option");
1792 /* Set up the tables of ld/st opcode names for block moves. */
1793 xtensa_ld_opcodes[(int) SImode] = "l32i";
1794 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1795 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1796 xtensa_st_opcodes[(int) SImode] = "s32i";
1797 xtensa_st_opcodes[(int) HImode] = "s16i";
1798 xtensa_st_opcodes[(int) QImode] = "s8i";
1800 xtensa_char_to_class['q'] = SP_REG;
1801 xtensa_char_to_class['a'] = GR_REGS;
1802 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1803 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1804 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1805 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1806 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1807 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1808 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1809 xtensa_char_to_class['W'] = ((TARGET_CONST16) ? GR_REGS: NO_REGS);
1811 /* Set up array giving whether a given register can hold a given mode. */
1812 for (mode = VOIDmode;
1813 mode != MAX_MACHINE_MODE;
1814 mode = (enum machine_mode) ((int) mode + 1))
1816 int size = GET_MODE_SIZE (mode);
1817 enum mode_class class = GET_MODE_CLASS (mode);
1819 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1821 int temp;
1823 if (ACC_REG_P (regno))
1824 temp = (TARGET_MAC16
1825 && (class == MODE_INT) && (size <= UNITS_PER_WORD));
1826 else if (GP_REG_P (regno))
1827 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1828 else if (FP_REG_P (regno))
1829 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1830 else if (BR_REG_P (regno))
1831 temp = (TARGET_BOOLEANS && (mode == CCmode));
1832 else
1833 temp = FALSE;
1835 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1839 init_machine_status = xtensa_init_machine_status;
1841 /* Check PIC settings. PIC is only supported when using L32R
1842 instructions, and some targets need to always use PIC. */
1843 if (flag_pic && TARGET_CONST16)
1844 error ("-f%s is not supported with CONST16 instructions",
1845 (flag_pic > 1 ? "PIC" : "pic"));
1846 else if (XTENSA_ALWAYS_PIC)
1848 if (TARGET_CONST16)
1849 error ("PIC is required but not supported with CONST16 instructions");
1850 flag_pic = 1;
1852 /* There's no need for -fPIC (as opposed to -fpic) on Xtensa. */
1853 if (flag_pic > 1)
1854 flag_pic = 1;
1858 /* A C compound statement to output to stdio stream STREAM the
1859 assembler syntax for an instruction operand X. X is an RTL
1860 expression.
1862 CODE is a value that can be used to specify one of several ways
1863 of printing the operand. It is used when identical operands
1864 must be printed differently depending on the context. CODE
1865 comes from the '%' specification that was used to request
1866 printing of the operand. If the specification was just '%DIGIT'
1867 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1868 is the ASCII code for LTR.
1870 If X is a register, this macro should print the register's name.
1871 The names can be found in an array 'reg_names' whose type is
1872 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1874 When the machine description has a specification '%PUNCT' (a '%'
1875 followed by a punctuation character), this macro is called with
1876 a null pointer for X and the punctuation character for CODE.
1878 'a', 'c', 'l', and 'n' are reserved.
1880 The Xtensa specific codes are:
1882 'd' CONST_INT, print as signed decimal
1883 'x' CONST_INT, print as signed hexadecimal
1884 'K' CONST_INT, print number of bits in mask for EXTUI
1885 'R' CONST_INT, print (X & 0x1f)
1886 'L' CONST_INT, print ((32 - X) & 0x1f)
1887 'D' REG, print second register of double-word register operand
1888 'N' MEM, print address of next word following a memory operand
1889 'v' MEM, if memory reference is volatile, output a MEMW before it
1890 't' any constant, add "@h" suffix for top 16 bits
1891 'b' any constant, add "@l" suffix for bottom 16 bits
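   For example, given (const_int 255), '%K' prints 8 (the number of mask
   bits for EXTUI), '%x' prints 0xff, and '%d' prints 255; given
   (const_int 3), '%R' prints 3 and '%L' prints (32 - 3) & 0x1f = 29.  */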
1894 static void
1895 printx (FILE *file, signed int val)
1897 /* Print a hexadecimal value in a nice way. */
1898 if ((val > -0xa) && (val < 0xa))
1899 fprintf (file, "%d", val);
1900 else if (val < 0)
1901 fprintf (file, "-0x%x", -val);
1902 else
1903 fprintf (file, "0x%x", val);
1907 void
1908 print_operand (FILE *file, rtx x, int letter)
1910 if (!x)
1911 error ("PRINT_OPERAND null pointer");
1913 switch (letter)
1915 case 'D':
1916 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
1917 fprintf (file, "%s", reg_names[xt_true_regnum (x) + 1]);
1918 else
1919 output_operand_lossage ("invalid %%D value");
1920 break;
1922 case 'v':
1923 if (GET_CODE (x) == MEM)
1925 /* For a volatile memory reference, emit a MEMW before the
1926 load or store. */
1927 if (MEM_VOLATILE_P (x))
1928 fprintf (file, "memw\n\t");
1930 else
1931 output_operand_lossage ("invalid %%v value");
1932 break;
1934 case 'N':
1935 if (GET_CODE (x) == MEM
1936 && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
1938 x = adjust_address (x, GET_MODE (x) == DFmode ? SFmode : SImode, 4);
1939 output_address (XEXP (x, 0));
1941 else
1942 output_operand_lossage ("invalid %%N value");
1943 break;
1945 case 'K':
1946 if (GET_CODE (x) == CONST_INT)
1948 int num_bits = 0;
1949 unsigned val = INTVAL (x);
1950 while (val & 1)
1952 num_bits += 1;
1953 val = val >> 1;
1955 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
1956 fatal_insn ("invalid mask", x);
1958 fprintf (file, "%d", num_bits);
1960 else
1961 output_operand_lossage ("invalid %%K value");
1962 break;
1964 case 'L':
1965 if (GET_CODE (x) == CONST_INT)
1966 fprintf (file, "%ld", (32 - INTVAL (x)) & 0x1f);
1967 else
1968 output_operand_lossage ("invalid %%L value");
1969 break;
1971 case 'R':
1972 if (GET_CODE (x) == CONST_INT)
1973 fprintf (file, "%ld", INTVAL (x) & 0x1f);
1974 else
1975 output_operand_lossage ("invalid %%R value");
1976 break;
1978 case 'x':
1979 if (GET_CODE (x) == CONST_INT)
1980 printx (file, INTVAL (x));
1981 else
1982 output_operand_lossage ("invalid %%x value");
1983 break;
1985 case 'd':
1986 if (GET_CODE (x) == CONST_INT)
1987 fprintf (file, "%ld", INTVAL (x));
1988 else
1989 output_operand_lossage ("invalid %%d value");
1990 break;
1992 case 't':
1993 case 'b':
1994 if (GET_CODE (x) == CONST_INT)
1996 printx (file, INTVAL (x));
1997 fputs (letter == 't' ? "@h" : "@l", file);
1999 else if (GET_CODE (x) == CONST_DOUBLE)
2001 REAL_VALUE_TYPE r;
2002 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2003 if (GET_MODE (x) == SFmode)
2005 long l;
2006 REAL_VALUE_TO_TARGET_SINGLE (r, l);
2007 fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
2009 else
2010 output_operand_lossage ("invalid %%t/%%b value");
2012 else if (GET_CODE (x) == CONST)
2014 /* X must be a symbolic constant on ELF. Write an expression
2015 suitable for 'const16' that sets the high or low 16 bits. */
2016 if (GET_CODE (XEXP (x, 0)) != PLUS
2017 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
2018 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
2019 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
2020 output_operand_lossage ("invalid %%t/%%b value");
2021 print_operand (file, XEXP (XEXP (x, 0), 0), 0);
2022 fputs (letter == 't' ? "@h" : "@l", file);
2023 /* There must be a non-alphanumeric character between 'h' or 'l'
2024 and the number. The '-' is added by print_operand() already. */
2025 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
2026 fputs ("+", file);
2027 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
2029 else
2031 output_addr_const (file, x);
2032 fputs (letter == 't' ? "@h" : "@l", file);
2034 break;
2036 default:
2037 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2038 fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
2039 else if (GET_CODE (x) == MEM)
2040 output_address (XEXP (x, 0));
2041 else if (GET_CODE (x) == CONST_INT)
2042 fprintf (file, "%ld", INTVAL (x));
2043 else
2044 output_addr_const (file, x);
2049 /* A C compound statement to output to stdio stream STREAM the
2050 assembler syntax for an instruction operand that is a memory
2051 reference whose address is ADDR. ADDR is an RTL expression. */
2053 void
2054 print_operand_address (FILE *file, rtx addr)
2056 if (!addr)
2057 error ("PRINT_OPERAND_ADDRESS, null pointer");
2059 switch (GET_CODE (addr))
2061 default:
2062 fatal_insn ("invalid address", addr);
2063 break;
2065 case REG:
2066 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2067 break;
2069 case PLUS:
2071 rtx reg = (rtx)0;
2072 rtx offset = (rtx)0;
2073 rtx arg0 = XEXP (addr, 0);
2074 rtx arg1 = XEXP (addr, 1);
2076 if (GET_CODE (arg0) == REG)
2078 reg = arg0;
2079 offset = arg1;
2081 else if (GET_CODE (arg1) == REG)
2083 reg = arg1;
2084 offset = arg0;
2086 else
2087 fatal_insn ("no register in address", addr);
2089 if (CONSTANT_P (offset))
2091 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2092 output_addr_const (file, offset);
2094 else
2095 fatal_insn ("address offset not a constant", addr);
2097 break;
2099 case LABEL_REF:
2100 case SYMBOL_REF:
2101 case CONST_INT:
2102 case CONST:
2103 output_addr_const (file, addr);
2104 break;
2109 void
2110 xtensa_output_literal (FILE *file, rtx x, enum machine_mode mode, int labelno)
2112 long value_long[2];
2113 REAL_VALUE_TYPE r;
2114 int size;
2116 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2118 switch (GET_MODE_CLASS (mode))
2120 case MODE_FLOAT:
2121 if (GET_CODE (x) != CONST_DOUBLE)
2122 abort ();
2124 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2125 switch (mode)
2127 case SFmode:
2128 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2129 fprintf (file, "0x%08lx\n", value_long[0]);
2130 break;
2132 case DFmode:
2133 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2134 fprintf (file, "0x%08lx, 0x%08lx\n",
2135 value_long[0], value_long[1]);
2136 break;
2138 default:
2139 abort ();
2142 break;
2144 case MODE_INT:
2145 case MODE_PARTIAL_INT:
2146 size = GET_MODE_SIZE (mode);
2147 if (size == 4)
2149 output_addr_const (file, x);
2150 fputs ("\n", file);
2152 else if (size == 8)
2154 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2155 fputs (", ", file);
2156 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2157 fputs ("\n", file);
2159 else
2160 abort ();
2161 break;
2163 default:
2164 abort ();
2169 /* Return the bytes needed to compute the frame pointer from the current
2170 stack pointer. */
2172 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2173 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
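/* For example, assuming a 128-bit STACK_BOUNDARY, STACK_BYTES is 16 and
   XTENSA_STACK_ALIGN (100) = (100 + 15) & ~15 = 112, rounding the frame
   size up to the next 16-byte boundary.  */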
2175 long
2176 compute_frame_size (int size)
2178 /* Add space for the incoming static chain value. */
2179 if (current_function_needs_context)
2180 size += (1 * UNITS_PER_WORD);
2182 xtensa_current_frame_size =
2183 XTENSA_STACK_ALIGN (size
2184 + current_function_outgoing_args_size
2185 + (WINDOW_SIZE * UNITS_PER_WORD));
2186 return xtensa_current_frame_size;
2191 xtensa_frame_pointer_required (void)
2193 /* The code to expand builtin_frame_addr and builtin_return_addr
2194 currently uses the hard_frame_pointer instead of frame_pointer.
2195 This seems wrong but maybe it's necessary for other architectures.
2196 This function is derived from the i386 code. */
2198 if (cfun->machine->accesses_prev_frame)
2199 return 1;
2201 return 0;
2205 void
2206 xtensa_expand_prologue (void)
2208 HOST_WIDE_INT total_size;
2209 rtx size_rtx;
2211 total_size = compute_frame_size (get_frame_size ());
2212 size_rtx = GEN_INT (total_size);
2214 if (total_size < (1 << (12+3)))
2215 emit_insn (gen_entry (size_rtx, size_rtx));
2216 else
2218 /* Use a8 as a temporary since a0-a7 may be live. */
2219 rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
2220 emit_insn (gen_entry (size_rtx, GEN_INT (MIN_FRAME_SIZE)));
2221 emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
2222 emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
2223 emit_move_insn (stack_pointer_rtx, tmp_reg);
2226 if (frame_pointer_needed)
2228 rtx first, insn, set_frame_ptr_insn = 0;
2230 push_topmost_sequence ();
2231 first = get_insns ();
2232 pop_topmost_sequence ();
2234 /* Search all instructions, looking for the insn that sets up the
2235 frame pointer. This search will fail if the function does not
2236 have an incoming argument in $a7, but in that case, we can just
2237 set up the frame pointer at the very beginning of the
2238 function. */
2240 for (insn = first; insn; insn = NEXT_INSN (insn))
2242 rtx pat;
2244 if (!INSN_P (insn))
2245 continue;
2247 pat = PATTERN (insn);
2248 if (GET_CODE (pat) == SET
2249 && GET_CODE (SET_SRC (pat)) == UNSPEC_VOLATILE
2250 && (XINT (SET_SRC (pat), 1) == UNSPECV_SET_FP))
2252 set_frame_ptr_insn = insn;
2253 break;
2257 if (set_frame_ptr_insn)
2259 /* For all instructions prior to set_frame_ptr_insn, replace
2260 hard_frame_pointer references with stack_pointer. */
2261 for (insn = first;
2262 insn != set_frame_ptr_insn;
2263 insn = NEXT_INSN (insn))
2265 if (INSN_P (insn))
2266 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2267 hard_frame_pointer_rtx,
2268 stack_pointer_rtx);
2271 else
2272 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
2277 /* Clear variables at function end. */
2279 void
2280 xtensa_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
2281 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
2283 xtensa_current_frame_size = 0;
2287 rtx
2288 xtensa_return_addr (int count, rtx frame)
2289 {
2290 rtx result, retaddr;
2292 if (count == -1)
2293 retaddr = gen_rtx_REG (Pmode, A0_REG);
2294 else
2296 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2297 addr = memory_address (Pmode, addr);
2298 retaddr = gen_reg_rtx (Pmode);
2299 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2302 /* The 2 most-significant bits of the return address on Xtensa hold
2303 the register window size. To get the real return address, these
2304 bits must be replaced with the high bits from the current PC. */
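/* Sketch of the fixup (not the literal expansion of fix_return_addr):

     real_addr = (retaddr & 0x3fffffff) | (current_pc & 0xc0000000);  */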
2306 result = gen_reg_rtx (Pmode);
2307 emit_insn (gen_fix_return_addr (result, retaddr));
2308 return result;
2312 /* Create the va_list data type.
2313 This structure is set up by __builtin_saveregs. The __va_reg
2314 field points to a stack-allocated region holding the contents of the
2315 incoming argument registers. The __va_ndx field is an index initialized
2316 to the position of the first unnamed (variable) argument. This same index
2317 is also used to address the arguments passed in memory. Thus, the
2318 __va_stk field is initialized to point to the position of the first
2319 argument in memory, offset to account for the arguments passed in
2320 registers. E.g., if there are 6 argument registers, and each register is
2321 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2322 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2323 argument word N for N >= 6. */
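/* For illustration only, the record built below corresponds roughly to:

     typedef struct __va_list_tag
     {
       char *__va_stk;    -- stack arguments, biased by the register-save
                             area so that __va_ndx indexes them directly
       char *__va_reg;    -- arguments spilled by __builtin_saveregs
       int __va_ndx;      -- byte index of the next unnamed argument
     } __va_list_tag;

   The actual field types used below are ptr_type_node, ptr_type_node and
   integer_type_node.  */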
2325 static tree
2326 xtensa_build_builtin_va_list (void)
2328 tree f_stk, f_reg, f_ndx, record, type_decl;
2330 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2331 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2333 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2334 ptr_type_node);
2335 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2336 ptr_type_node);
2337 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2338 integer_type_node);
2340 DECL_FIELD_CONTEXT (f_stk) = record;
2341 DECL_FIELD_CONTEXT (f_reg) = record;
2342 DECL_FIELD_CONTEXT (f_ndx) = record;
2344 TREE_CHAIN (record) = type_decl;
2345 TYPE_NAME (record) = type_decl;
2346 TYPE_FIELDS (record) = f_stk;
2347 TREE_CHAIN (f_stk) = f_reg;
2348 TREE_CHAIN (f_reg) = f_ndx;
2350 layout_type (record);
2351 return record;
2355 /* Save the incoming argument registers on the stack. Returns the
2356 address of the saved registers. */
2358 rtx
2359 xtensa_builtin_saveregs (void)
2360 {
2361 rtx gp_regs, dest;
2362 int arg_words = current_function_arg_words;
2363 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2364 int i;
2366 if (gp_left == 0)
2367 return const0_rtx;
2369 /* Allocate the general-purpose register space. */
2370 gp_regs = assign_stack_local
2371 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2372 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2374 /* Now store the incoming registers. */
2375 dest = change_address (gp_regs, SImode,
2376 plus_constant (XEXP (gp_regs, 0),
2377 arg_words * UNITS_PER_WORD));
2379 /* Note: Don't use move_block_from_reg() here because the incoming
2380 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2381 Instead, call gen_raw_REG() directly so that we get a distinct
2382 instance of (REG:SI 7). */
2383 for (i = 0; i < gp_left; i++)
2385 emit_move_insn (operand_subword (dest, i, 1, BLKmode),
2386 gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
2389 return XEXP (gp_regs, 0);
2393 /* Implement `va_start' for varargs and stdarg. We look at the
2394 current function to fill in an initial va_list. */
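/* In outline, the expansions below amount to the following sketch, where
   "ap" is the va_list being filled in and "arg_ptr" stands for
   virtual_incoming_args_rtx:

     ap.__va_reg = __builtin_saveregs ();
     ap.__va_stk = arg_ptr - MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD;
     ap.__va_ndx = arg_words * UNITS_PER_WORD;  */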
2396 void
2397 xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
2399 tree f_stk, stk;
2400 tree f_reg, reg;
2401 tree f_ndx, ndx;
2402 tree t, u;
2403 int arg_words;
2405 arg_words = current_function_args_info.arg_words;
2407 f_stk = TYPE_FIELDS (va_list_type_node);
2408 f_reg = TREE_CHAIN (f_stk);
2409 f_ndx = TREE_CHAIN (f_reg);
2411 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2412 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2413 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2415 /* Call __builtin_saveregs; save the result in __va_reg */
2416 current_function_arg_words = arg_words;
2417 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2418 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2419 TREE_SIDE_EFFECTS (t) = 1;
2420 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2422 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2423 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2424 u = fold (build (PLUS_EXPR, ptr_type_node, u,
2425 build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
2426 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2427 TREE_SIDE_EFFECTS (t) = 1;
2428 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2430 /* Set the __va_ndx member. */
2431 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2432 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2433 TREE_SIDE_EFFECTS (t) = 1;
2434 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2438 /* Implement `va_arg'. */
2440 rtx
2441 xtensa_va_arg (tree valist, tree type)
2442 {
2443 tree f_stk, stk;
2444 tree f_reg, reg;
2445 tree f_ndx, ndx;
2446 tree tmp, addr_tree, type_size;
2447 rtx array, orig_ndx, r, addr, size, va_size;
2448 rtx lab_false, lab_over, lab_false2;
2450 /* Handle complex values as separate real and imaginary parts. */
2451 if (TREE_CODE (type) == COMPLEX_TYPE)
2453 rtx real_part, imag_part, concat_val, local_copy;
2455 real_part = xtensa_va_arg (valist, TREE_TYPE (type));
2456 imag_part = xtensa_va_arg (valist, TREE_TYPE (type));
2458 /* Make a copy of the value in case the parts are not contiguous. */
2459 real_part = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (type)), real_part);
2460 imag_part = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (type)), imag_part);
2461 concat_val = gen_rtx_CONCAT (TYPE_MODE (type), real_part, imag_part);
2463 local_copy = assign_temp (type, 0, 1, 0);
2464 emit_move_insn (local_copy, concat_val);
2466 return XEXP (local_copy, 0);
2469 f_stk = TYPE_FIELDS (va_list_type_node);
2470 f_reg = TREE_CHAIN (f_stk);
2471 f_ndx = TREE_CHAIN (f_reg);
2473 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2474 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2475 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2477 type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
2479 va_size = gen_reg_rtx (SImode);
2480 tmp = fold (build (MULT_EXPR, sizetype,
2481 fold (build (TRUNC_DIV_EXPR, sizetype,
2482 fold (build (PLUS_EXPR, sizetype,
2483 type_size,
2484 size_int (UNITS_PER_WORD - 1))),
2485 size_int (UNITS_PER_WORD))),
2486 size_int (UNITS_PER_WORD)));
2487 r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
2488 if (r != va_size)
2489 emit_move_insn (va_size, r);
2492 /* First align __va_ndx to a double word boundary if necessary for this arg:
2494 if (__alignof__ (TYPE) > 4)
2495 (AP).__va_ndx = (((AP).__va_ndx + 7) & -8); */
2497 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2499 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2500 build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
2501 tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2502 build_int_2 (-2 * UNITS_PER_WORD, -1));
2503 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2504 TREE_SIDE_EFFECTS (tmp) = 1;
2505 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2509 /* Increment __va_ndx to point past the argument:
2511 orig_ndx = (AP).__va_ndx;
2512 (AP).__va_ndx += __va_size (TYPE); */
2514 orig_ndx = gen_reg_rtx (SImode);
2515 r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2516 if (r != orig_ndx)
2517 emit_move_insn (orig_ndx, r);
2519 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2520 make_tree (intSI_type_node, va_size));
2521 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2522 TREE_SIDE_EFFECTS (tmp) = 1;
2523 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2526 /* Check if the argument is in registers:
2528 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2529 && !MUST_PASS_IN_STACK (type))
2530 __array = (AP).__va_reg; */
2532 array = gen_reg_rtx (Pmode);
2534 lab_over = NULL_RTX;
2535 if (!MUST_PASS_IN_STACK (VOIDmode, type))
2537 lab_false = gen_label_rtx ();
2538 lab_over = gen_label_rtx ();
2540 emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
2541 EXPAND_NORMAL),
2542 GEN_INT (MAX_ARGS_IN_REGISTERS
2543 * UNITS_PER_WORD),
2544 GT, const1_rtx, SImode, 0, lab_false);
2546 r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2547 if (r != array)
2548 emit_move_insn (array, r);
2550 emit_jump_insn (gen_jump (lab_over));
2551 emit_barrier ();
2552 emit_label (lab_false);
2555 /* ...otherwise, the argument is on the stack (never split between
2556 registers and the stack -- change __va_ndx if necessary):
2558 else
2560 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2561 (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2562 __array = (AP).__va_stk;
2563 } */
2565 lab_false2 = gen_label_rtx ();
2566 emit_cmp_and_jump_insns (orig_ndx,
2567 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2568 GE, const1_rtx, SImode, 0, lab_false2);
2570 tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
2571 build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
2572 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2573 TREE_SIDE_EFFECTS (tmp) = 1;
2574 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2576 emit_label (lab_false2);
2578 r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2579 if (r != array)
2580 emit_move_insn (array, r);
2582 if (lab_over != NULL_RTX)
2583 emit_label (lab_over);
2586 /* Given the base array pointer (__array) and index to the subsequent
2587 argument (__va_ndx), find the address:
2589 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2590 ? sizeof (TYPE)
2591 : __va_size (TYPE))
2593 The results are endian-dependent because values smaller than one word
2594 are aligned differently. */
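/* Example (illustrative): for an argument type two bytes wide, __va_size
   is a full word and the argument's slot is the four bytes ending at
   __va_ndx.  A little-endian target reads it at __array + __va_ndx - 4,
   whereas a big-endian target keeps the value in the high-addressed bytes
   of the slot and reads it at __array + __va_ndx - 2.  */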
2596 size = gen_reg_rtx (SImode);
2597 emit_move_insn (size, va_size);
2599 if (BYTES_BIG_ENDIAN)
2601 rtx lab_use_va_size = gen_label_rtx ();
2603 emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
2604 EXPAND_NORMAL),
2605 GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
2606 GE, const1_rtx, SImode, 0, lab_use_va_size);
2608 r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
2609 if (r != size)
2610 emit_move_insn (size, r);
2612 emit_label (lab_use_va_size);
2615 addr_tree = build (PLUS_EXPR, ptr_type_node,
2616 make_tree (ptr_type_node, array),
2617 ndx);
2618 addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
2619 make_tree (intSI_type_node, size));
2620 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2621 addr = copy_to_reg (addr);
2622 return addr;
2626 enum reg_class
2627 xtensa_preferred_reload_class (rtx x, enum reg_class class, int isoutput)
2629 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2630 return NO_REGS;
2632 /* Don't use the stack pointer or hard frame pointer for reloads!
2633 The hard frame pointer would normally be OK except that it may
2634 briefly hold an incoming argument in the prologue, and reload
2635 won't know that it is live because the hard frame pointer is
2636 treated specially. */
2638 if (class == AR_REGS || class == GR_REGS)
2639 return RL_REGS;
2641 return class;
2645 enum reg_class
2646 xtensa_secondary_reload_class (enum reg_class class,
2647 enum machine_mode mode ATTRIBUTE_UNUSED,
2648 rtx x, int isoutput)
2650 int regno;
2652 if (GET_CODE (x) == SIGN_EXTEND)
2653 x = XEXP (x, 0);
2654 regno = xt_true_regnum (x);
2656 if (!isoutput)
2658 if (class == FP_REGS && constantpool_mem_p (x))
2659 return RL_REGS;
2662 if (ACC_REG_P (regno))
2663 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
2664 if (class == ACC_REG)
2665 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
2667 return NO_REGS;
2671 void
2672 order_regs_for_local_alloc (void)
2674 if (!leaf_function_p ())
2676 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2677 FIRST_PSEUDO_REGISTER * sizeof (int));
2679 else
2681 int i, num_arg_regs;
2682 int nxt = 0;
2684 /* Use the AR registers in increasing order (skipping a0 and a1)
2685 but save the incoming argument registers as a last resort.
2686 num_arg_regs = current_function_args_info.arg_words;
2687 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2688 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2689 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2690 reg_alloc_order[nxt++] = i + num_arg_regs;
2691 for (i = 0; i < num_arg_regs; i++)
2692 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2694 /* List the coprocessor registers in order. */
2695 for (i = 0; i < BR_REG_NUM; i++)
2696 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2698 /* List the FP registers in order for now. */
2699 for (i = 0; i < 16; i++)
2700 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2702 /* GCC requires that we list *all* the registers.... */
2703 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2704 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2705 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2706 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2708 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2713 /* A customized version of reg_overlap_mentioned_p that only looks for
2714 references to a7 (as opposed to hard_frame_pointer_rtx). */
2716 int
2717 a7_overlap_mentioned_p (rtx x)
2718 {
2719 int i, j;
2720 unsigned int x_regno;
2721 const char *fmt;
2723 if (GET_CODE (x) == REG)
2725 x_regno = REGNO (x);
2726 return (x != hard_frame_pointer_rtx
2727 && x_regno < A7_REG + 1
2728 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2731 if (GET_CODE (x) == SUBREG
2732 && GET_CODE (SUBREG_REG (x)) == REG
2733 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2735 x_regno = subreg_regno (x);
2736 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2737 && x_regno < A7_REG + 1
2738 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2741 /* X does not match, so try its subexpressions. */
2742 fmt = GET_RTX_FORMAT (GET_CODE (x));
2743 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2745 if (fmt[i] == 'e')
2747 if (a7_overlap_mentioned_p (XEXP (x, i)))
2748 return 1;
2750 else if (fmt[i] == 'E')
2752 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2753 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2754 return 1;
2758 return 0;
2762 /* Some Xtensa targets support multiple bss sections. If the section
2763 name ends with ".bss", add SECTION_BSS to the flags. */
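/* For example (illustrative only; the section name here is hypothetical),
   a declaration such as

     static int scratch[64] __attribute__ ((section (".dram0.bss")));

   names a section ending in ".bss", so the flags returned below include
   SECTION_BSS as long as the variable has no initializer.  */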
2765 static unsigned int
2766 xtensa_multibss_section_type_flags (tree decl, const char *name, int reloc)
2768 unsigned int flags = default_section_type_flags (decl, name, reloc);
2769 const char *suffix;
2771 suffix = strrchr (name, '.');
2772 if (suffix && strcmp (suffix, ".bss") == 0)
2774 if (!decl || (TREE_CODE (decl) == VAR_DECL
2775 && DECL_INITIAL (decl) == NULL_TREE))
2776 flags |= SECTION_BSS; /* @nobits */
2777 else
2778 warning ("only uninitialized variables can be placed in a "
2779 ".bss section");
2782 return flags;
2786 /* The literal pool stays with the function. */
2788 static void
2789 xtensa_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
2790 rtx x ATTRIBUTE_UNUSED,
2791 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2793 function_section (current_function_decl);
2797 /* Compute a (partial) cost for rtx X. Return true if the complete
2798 cost has been computed, and false if subexpressions should be
2799 scanned. In either case, *TOTAL contains the cost result. */
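/* For instance (illustrative), costing (set (reg:SI n) (const_int 2000))
   reaches the CONST_INT case with outer_code == SET; 2000 fits in a
   signed 12-bit immediate, so *total is set to 4 and we return true,
   meaning the subexpressions need not be scanned.  */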
2801 static bool
2802 xtensa_rtx_costs (rtx x, int code, int outer_code, int *total)
2804 switch (code)
2806 case CONST_INT:
2807 switch (outer_code)
2809 case SET:
2810 if (xtensa_simm12b (INTVAL (x)))
2812 *total = 4;
2813 return true;
2815 break;
2816 case PLUS:
2817 if (xtensa_simm8 (INTVAL (x))
2818 || xtensa_simm8x256 (INTVAL (x)))
2820 *total = 0;
2821 return true;
2823 break;
2824 case AND:
2825 if (xtensa_mask_immediate (INTVAL (x)))
2827 *total = 0;
2828 return true;
2830 break;
2831 case COMPARE:
2832 if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
2834 *total = 0;
2835 return true;
2837 break;
2838 case ASHIFT:
2839 case ASHIFTRT:
2840 case LSHIFTRT:
2841 case ROTATE:
2842 case ROTATERT:
2843 /* No way to tell if X is the 2nd operand so be conservative. */
2844 default: break;
2846 if (xtensa_simm12b (INTVAL (x)))
2847 *total = 5;
2848 else if (TARGET_CONST16)
2849 *total = COSTS_N_INSNS (2);
2850 else
2851 *total = 6;
2852 return true;
2854 case CONST:
2855 case LABEL_REF:
2856 case SYMBOL_REF:
2857 if (TARGET_CONST16)
2858 *total = COSTS_N_INSNS (2);
2859 else
2860 *total = 5;
2861 return true;
2863 case CONST_DOUBLE:
2864 if (TARGET_CONST16)
2865 *total = COSTS_N_INSNS (4);
2866 else
2867 *total = 7;
2868 return true;
2870 case MEM:
2872 int num_words =
2873 (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ? 2 : 1;
2875 if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
2876 *total = COSTS_N_INSNS (num_words);
2877 else
2878 *total = COSTS_N_INSNS (2*num_words);
2879 return true;
2882 case FFS:
2883 *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
2884 return true;
2886 case NOT:
2887 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
2888 return true;
2890 case AND:
2891 case IOR:
2892 case XOR:
2893 if (GET_MODE (x) == DImode)
2894 *total = COSTS_N_INSNS (2);
2895 else
2896 *total = COSTS_N_INSNS (1);
2897 return true;
2899 case ASHIFT:
2900 case ASHIFTRT:
2901 case LSHIFTRT:
2902 if (GET_MODE (x) == DImode)
2903 *total = COSTS_N_INSNS (50);
2904 else
2905 *total = COSTS_N_INSNS (1);
2906 return true;
2908 case ABS:
2910 enum machine_mode xmode = GET_MODE (x);
2911 if (xmode == SFmode)
2912 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2913 else if (xmode == DFmode)
2914 *total = COSTS_N_INSNS (50);
2915 else
2916 *total = COSTS_N_INSNS (4);
2917 return true;
2920 case PLUS:
2921 case MINUS:
2923 enum machine_mode xmode = GET_MODE (x);
2924 if (xmode == SFmode)
2925 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2926 else if (xmode == DFmode || xmode == DImode)
2927 *total = COSTS_N_INSNS (50);
2928 else
2929 *total = COSTS_N_INSNS (1);
2930 return true;
2933 case NEG:
2934 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
2935 return true;
2937 case MULT:
2939 enum machine_mode xmode = GET_MODE (x);
2940 if (xmode == SFmode)
2941 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
2942 else if (xmode == DFmode || xmode == DImode)
2943 *total = COSTS_N_INSNS (50);
2944 else if (TARGET_MUL32)
2945 *total = COSTS_N_INSNS (4);
2946 else if (TARGET_MAC16)
2947 *total = COSTS_N_INSNS (16);
2948 else if (TARGET_MUL16)
2949 *total = COSTS_N_INSNS (12);
2950 else
2951 *total = COSTS_N_INSNS (50);
2952 return true;
2955 case DIV:
2956 case MOD:
2958 enum machine_mode xmode = GET_MODE (x);
2959 if (xmode == SFmode)
2961 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
2962 return true;
2964 else if (xmode == DFmode)
2966 *total = COSTS_N_INSNS (50);
2967 return true;
2970 /* Fall through. */
2972 case UDIV:
2973 case UMOD:
2975 enum machine_mode xmode = GET_MODE (x);
2976 if (xmode == DImode)
2977 *total = COSTS_N_INSNS (50);
2978 else if (TARGET_DIV32)
2979 *total = COSTS_N_INSNS (32);
2980 else
2981 *total = COSTS_N_INSNS (50);
2982 return true;
2985 case SQRT:
2986 if (GET_MODE (x) == SFmode)
2987 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
2988 else
2989 *total = COSTS_N_INSNS (50);
2990 return true;
2992 case SMIN:
2993 case UMIN:
2994 case SMAX:
2995 case UMAX:
2996 *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
2997 return true;
2999 case SIGN_EXTRACT:
3000 case SIGN_EXTEND:
3001 *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
3002 return true;
3004 case ZERO_EXTRACT:
3005 case ZERO_EXTEND:
3006 *total = COSTS_N_INSNS (1);
3007 return true;
3009 default:
3010 return false;
3014 #include "gt-xtensa.h"