* config/xtensa/lib2funcs.S (TRAMPOLINE_SIZE): Change from 49 to 59.
[official-gcc.git] / gcc / config / xtensa / xtensa.c
blob25bf6475b4608ba33ff1e780b47e66be231e138f
1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright 2001,2002,2003 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "insn-attr.h"
35 #include "insn-codes.h"
36 #include "recog.h"
37 #include "output.h"
38 #include "tree.h"
39 #include "expr.h"
40 #include "flags.h"
41 #include "reload.h"
42 #include "tm_p.h"
43 #include "function.h"
44 #include "toplev.h"
45 #include "optabs.h"
46 #include "output.h"
47 #include "libfuncs.h"
48 #include "ggc.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 /* Enumeration for all of the relational tests, so that we can build
54 arrays indexed by the test type, and not worry about the order
55 of EQ, NE, etc. */
57 enum internal_test {
58 ITEST_EQ,
59 ITEST_NE,
60 ITEST_GT,
61 ITEST_GE,
62 ITEST_LT,
63 ITEST_LE,
64 ITEST_GTU,
65 ITEST_GEU,
66 ITEST_LTU,
67 ITEST_LEU,
68 ITEST_MAX
71 /* Cached operands, and operator to compare for use in set/branch on
72 condition codes. */
73 rtx branch_cmp[2];
75 /* what type of branch to use */
76 enum cmp_type branch_type;
78 /* Array giving truth value on whether or not a given hard register
79 can support a given mode. */
80 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
82 /* Current frame size calculated by compute_frame_size. */
83 unsigned xtensa_current_frame_size;
85 /* Tables of ld/st opcode names for block moves */
86 const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
87 const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
88 #define LARGEST_MOVE_RATIO 15
90 /* Define the structure for the machine field in struct function. */
91 struct machine_function GTY(())
93 int accesses_prev_frame;
94 bool incoming_a7_copied;
97 /* Vector, indexed by hard register number, which contains 1 for a
98 register that is allowable in a candidate for leaf function
99 treatment. */
101 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
103 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
104 1, 1, 1,
105 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
109 /* Map hard register number to register class */
110 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
112 RL_REGS, SP_REG, RL_REGS, RL_REGS,
113 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
114 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
115 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
116 AR_REGS, AR_REGS, BR_REGS,
117 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
118 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
119 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
120 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
121 ACC_REG,
124 /* Map register constraint character to register class. */
125 enum reg_class xtensa_char_to_class[256] =
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
189 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
190 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
193 static int b4const_or_zero PARAMS ((int));
194 static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
195 static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
196 static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
197 static rtx gen_conditional_move PARAMS ((rtx));
198 static rtx fixup_subreg_mem PARAMS ((rtx x));
199 static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
200 static struct machine_function * xtensa_init_machine_status PARAMS ((void));
201 static void printx PARAMS ((FILE *, signed int));
202 static void xtensa_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
203 static unsigned int xtensa_multibss_section_type_flags
204 PARAMS ((tree, const char *, int));
205 static void xtensa_select_rtx_section
206 PARAMS ((enum machine_mode, rtx, unsigned HOST_WIDE_INT));
207 static bool xtensa_rtx_costs PARAMS ((rtx, int, int, int *));
209 static int current_function_arg_words;
210 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
211 REG_ALLOC_ORDER;
214 /* This macro generates the assembly code for function exit,
215 on machines that need it. If FUNCTION_EPILOGUE is not defined
216 then individual return instructions are generated for each
217 return statement. Args are same as for FUNCTION_PROLOGUE. */
219 #undef TARGET_ASM_FUNCTION_EPILOGUE
220 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
222 /* These hooks specify assembly directives for creating certain kinds
223 of integer object. */
225 #undef TARGET_ASM_ALIGNED_SI_OP
226 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
228 #undef TARGET_ASM_SELECT_RTX_SECTION
229 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
231 #undef TARGET_RTX_COSTS
232 #define TARGET_RTX_COSTS xtensa_rtx_costs
233 #undef TARGET_ADDRESS_COST
234 #define TARGET_ADDRESS_COST hook_int_rtx_0
236 struct gcc_target targetm = TARGET_INITIALIZER;
240 * Functions to test Xtensa immediate operand validity.
/* Return 1 if V is a valid unsigned "b4constu" branch immediate.
   The encodable values are 32768, 65536, 2..8, 10, 12, 16, 32, 64,
   128 and 256 (note: 1 is NOT in this table; it is replaced by the
   two large values).  */
int
xtensa_b4constu (int v)
{
  switch (v)
    {
    case 32768:
    case 65536:
    case 2:
    case 3:
    case 4:
    case 5:
    case 6:
    case 7:
    case 8:
    case 10:
    case 12:
    case 16:
    case 32:
    case 64:
    case 128:
    case 256:
      return 1;
    }
  return 0;
}
/* Return 1 if V is a signed 8-bit immediate scaled by 256, i.e. a
   multiple of 256 in the range [-32768, 32512].  */
int
xtensa_simm8x256 (int v)
{
  return (v & 255) == 0 && (v >= -32768 && v <= 32512);
}
/* Return 1 if V is valid for the ADDI.N 4-bit immediate:
   -1 or 1..15 (zero is excluded).  */
int
xtensa_ai4const (int v)
{
  return (v == -1 || (v >= 1 && v <= 15));
}
/* Return 1 if V fits the MOVI.N 7-bit immediate range [-32, 95].  */
int
xtensa_simm7 (int v)
{
  return v >= -32 && v <= 95;
}
/* Return 1 if V is a valid signed "b4const" branch immediate:
   -1, 1..8, 10, 12, 16, 32, 64, 128 or 256.  */
int
xtensa_b4const (int v)
{
  switch (v)
    {
    case -1:
    case 1:
    case 2:
    case 3:
    case 4:
    case 5:
    case 6:
    case 7:
    case 8:
    case 10:
    case 12:
    case 16:
    case 32:
    case 64:
    case 128:
    case 256:
      return 1;
    }
  return 0;
}
/* Return 1 if V is a signed 8-bit immediate, [-128, 127].  */
int
xtensa_simm8 (int v)
{
  return v >= -128 && v <= 127;
}
/* Return 1 if V is in [7, 22] — the encodable range for the SEXT
   field-size operand (see sext_fldsz_operand, which checks size-1).  */
int
xtensa_tp7 (int v)
{
  return (v >= 7 && v <= 22);
}
/* Return 1 if V is a valid L32I.N/S32I.N offset: a multiple of 4
   in [0, 60].  */
int
xtensa_lsi4x4 (int v)
{
  return (v & 3) == 0 && (v >= 0 && v <= 60);
}
/* Return 1 if V fits the MOVI 12-bit signed immediate, [-2048, 2047].  */
int
xtensa_simm12b (int v)
{
  return v >= -2048 && v <= 2047;
}
/* Return 1 if V is an unsigned 8-bit offset, [0, 255] (byte loads/stores).  */
int
xtensa_uimm8 (int v)
{
  return v >= 0 && v <= 255;
}
/* Return 1 if V is an even unsigned 8-bit offset scaled by 2,
   [0, 510] (16-bit loads/stores).  */
int
xtensa_uimm8x2 (int v)
{
  return (v & 1) == 0 && (v >= 0 && v <= 510);
}
/* Return 1 if V is an unsigned 8-bit offset scaled by 4: a multiple
   of 4 in [0, 1020] (32-bit loads/stores).  */
int
xtensa_uimm8x4 (int v)
{
  return (v & 3) == 0 && (v >= 0 && v <= 1020);
}
368 /* This is just like the standard true_regnum() function except that it
369 works even when reg_renumber is not initialized. */
372 xt_true_regnum (x)
373 rtx x;
375 if (GET_CODE (x) == REG)
377 if (reg_renumber
378 && REGNO (x) >= FIRST_PSEUDO_REGISTER
379 && reg_renumber[REGNO (x)] >= 0)
380 return reg_renumber[REGNO (x)];
381 return REGNO (x);
383 if (GET_CODE (x) == SUBREG)
385 int base = xt_true_regnum (SUBREG_REG (x));
386 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
387 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
388 GET_MODE (SUBREG_REG (x)),
389 SUBREG_BYTE (x), GET_MODE (x));
391 return -1;
396 add_operand (op, mode)
397 rtx op;
398 enum machine_mode mode;
400 if (GET_CODE (op) == CONST_INT)
401 return (xtensa_simm8 (INTVAL (op)) || xtensa_simm8x256 (INTVAL (op)));
403 return register_operand (op, mode);
408 arith_operand (op, mode)
409 rtx op;
410 enum machine_mode mode;
412 if (GET_CODE (op) == CONST_INT)
413 return xtensa_simm8 (INTVAL (op));
415 return register_operand (op, mode);
420 nonimmed_operand (op, mode)
421 rtx op;
422 enum machine_mode mode;
424 /* We cannot use the standard nonimmediate_operand() predicate because
425 it includes constant pool memory operands. */
427 if (memory_operand (op, mode))
428 return !constantpool_address_p (XEXP (op, 0));
430 return register_operand (op, mode);
435 mem_operand (op, mode)
436 rtx op;
437 enum machine_mode mode;
439 /* We cannot use the standard memory_operand() predicate because
440 it includes constant pool memory operands. */
442 if (memory_operand (op, mode))
443 return !constantpool_address_p (XEXP (op, 0));
445 return FALSE;
450 xtensa_valid_move (mode, operands)
451 enum machine_mode mode;
452 rtx *operands;
454 /* Either the destination or source must be a register, and the
455 MAC16 accumulator doesn't count. */
457 if (register_operand (operands[0], mode))
459 int dst_regnum = xt_true_regnum (operands[0]);
461 /* The stack pointer can only be assigned with a MOVSP opcode. */
462 if (dst_regnum == STACK_POINTER_REGNUM)
463 return (mode == SImode
464 && register_operand (operands[1], mode)
465 && !ACC_REG_P (xt_true_regnum (operands[1])));
467 if (!ACC_REG_P (dst_regnum))
468 return true;
470 if (register_operand (operands[1], mode))
472 int src_regnum = xt_true_regnum (operands[1]);
473 if (!ACC_REG_P (src_regnum))
474 return true;
476 return FALSE;
481 mask_operand (op, mode)
482 rtx op;
483 enum machine_mode mode;
485 if (GET_CODE (op) == CONST_INT)
486 return xtensa_mask_immediate (INTVAL (op));
488 return register_operand (op, mode);
493 extui_fldsz_operand (op, mode)
494 rtx op;
495 enum machine_mode mode ATTRIBUTE_UNUSED;
497 return ((GET_CODE (op) == CONST_INT)
498 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
503 sext_operand (op, mode)
504 rtx op;
505 enum machine_mode mode;
507 if (TARGET_SEXT)
508 return nonimmed_operand (op, mode);
509 return mem_operand (op, mode);
514 sext_fldsz_operand (op, mode)
515 rtx op;
516 enum machine_mode mode ATTRIBUTE_UNUSED;
518 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
523 lsbitnum_operand (op, mode)
524 rtx op;
525 enum machine_mode mode ATTRIBUTE_UNUSED;
527 if (GET_CODE (op) == CONST_INT)
529 return (BITS_BIG_ENDIAN
530 ? (INTVAL (op) == BITS_PER_WORD-1)
531 : (INTVAL (op) == 0));
533 return FALSE;
537 static int
538 b4const_or_zero (v)
539 int v;
541 if (v == 0)
542 return TRUE;
543 return xtensa_b4const (v);
548 branch_operand (op, mode)
549 rtx op;
550 enum machine_mode mode;
552 if (GET_CODE (op) == CONST_INT)
553 return b4const_or_zero (INTVAL (op));
555 return register_operand (op, mode);
560 ubranch_operand (op, mode)
561 rtx op;
562 enum machine_mode mode;
564 if (GET_CODE (op) == CONST_INT)
565 return xtensa_b4constu (INTVAL (op));
567 return register_operand (op, mode);
572 call_insn_operand (op, mode)
573 rtx op;
574 enum machine_mode mode ATTRIBUTE_UNUSED;
576 if ((GET_CODE (op) == REG)
577 && (op != arg_pointer_rtx)
578 && ((REGNO (op) < FRAME_POINTER_REGNUM)
579 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
580 return TRUE;
582 if (CONSTANT_ADDRESS_P (op))
584 /* Direct calls only allowed to static functions with PIC. */
585 return (!flag_pic
586 || (GET_CODE (op) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op)));
589 return FALSE;
594 move_operand (op, mode)
595 rtx op;
596 enum machine_mode mode;
598 if (register_operand (op, mode)
599 || memory_operand (op, mode))
600 return TRUE;
602 if (mode == SFmode)
603 return TARGET_CONST16 && CONSTANT_P (op);
605 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
606 result in 0/1. */
607 if (GET_CODE (op) == CONSTANT_P_RTX)
608 return TRUE;
610 if (GET_CODE (op) == CONST_INT && xtensa_simm12b (INTVAL (op)))
611 return TRUE;
613 if (mode == SImode)
614 return TARGET_CONST16 && CONSTANT_P (op);
616 return FALSE;
621 smalloffset_mem_p (op)
622 rtx op;
624 if (GET_CODE (op) == MEM)
626 rtx addr = XEXP (op, 0);
627 if (GET_CODE (addr) == REG)
628 return REG_OK_FOR_BASE_P (addr);
629 if (GET_CODE (addr) == PLUS)
631 rtx offset = XEXP (addr, 0);
632 if (GET_CODE (offset) != CONST_INT)
633 offset = XEXP (addr, 1);
634 if (GET_CODE (offset) != CONST_INT)
635 return FALSE;
636 return xtensa_lsi4x4 (INTVAL (offset));
639 return FALSE;
644 smalloffset_double_mem_p (op)
645 rtx op;
647 if (!smalloffset_mem_p (op))
648 return FALSE;
649 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
654 constantpool_address_p (addr)
655 rtx addr;
657 rtx sym = addr;
659 if (GET_CODE (addr) == CONST)
661 rtx offset;
663 /* only handle (PLUS (SYM, OFFSET)) form */
664 addr = XEXP (addr, 0);
665 if (GET_CODE (addr) != PLUS)
666 return FALSE;
668 /* make sure the address is word aligned */
669 offset = XEXP (addr, 1);
670 if ((GET_CODE (offset) != CONST_INT)
671 || ((INTVAL (offset) & 3) != 0))
672 return FALSE;
674 sym = XEXP (addr, 0);
677 if ((GET_CODE (sym) == SYMBOL_REF)
678 && CONSTANT_POOL_ADDRESS_P (sym))
679 return TRUE;
680 return FALSE;
685 constantpool_mem_p (op)
686 rtx op;
688 if (GET_CODE (op) == MEM)
689 return constantpool_address_p (XEXP (op, 0));
690 return FALSE;
694 /* Accept the floating point constant 1 in the appropriate mode. */
697 const_float_1_operand (op, mode)
698 rtx op;
699 enum machine_mode mode;
701 REAL_VALUE_TYPE d;
702 static REAL_VALUE_TYPE onedf;
703 static REAL_VALUE_TYPE onesf;
704 static int one_initialized;
706 if ((GET_CODE (op) != CONST_DOUBLE)
707 || (mode != GET_MODE (op))
708 || (mode != DFmode && mode != SFmode))
709 return FALSE;
711 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
713 if (! one_initialized)
715 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
716 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
717 one_initialized = TRUE;
720 if (mode == DFmode)
721 return REAL_VALUES_EQUAL (d, onedf);
722 else
723 return REAL_VALUES_EQUAL (d, onesf);
728 fpmem_offset_operand (op, mode)
729 rtx op;
730 enum machine_mode mode ATTRIBUTE_UNUSED;
732 if (GET_CODE (op) == CONST_INT)
733 return xtensa_mem_offset (INTVAL (op), SFmode);
734 return 0;
738 void
739 xtensa_extend_reg (dst, src)
740 rtx dst;
741 rtx src;
743 rtx temp = gen_reg_rtx (SImode);
744 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
746 /* generate paradoxical subregs as needed so that the modes match */
747 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
748 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
750 emit_insn (gen_ashlsi3 (temp, src, shift));
751 emit_insn (gen_ashrsi3 (dst, temp, shift));
756 branch_operator (x, mode)
757 rtx x;
758 enum machine_mode mode;
760 if (GET_MODE (x) != mode)
761 return FALSE;
763 switch (GET_CODE (x))
765 case EQ:
766 case NE:
767 case LT:
768 case GE:
769 return TRUE;
770 default:
771 break;
773 return FALSE;
778 ubranch_operator (x, mode)
779 rtx x;
780 enum machine_mode mode;
782 if (GET_MODE (x) != mode)
783 return FALSE;
785 switch (GET_CODE (x))
787 case LTU:
788 case GEU:
789 return TRUE;
790 default:
791 break;
793 return FALSE;
798 boolean_operator (x, mode)
799 rtx x;
800 enum machine_mode mode;
802 if (GET_MODE (x) != mode)
803 return FALSE;
805 switch (GET_CODE (x))
807 case EQ:
808 case NE:
809 return TRUE;
810 default:
811 break;
813 return FALSE;
/* Return 1 if V is a valid EXTUI mask immediate: a contiguous run of
   1..16 low-order one bits (e.g. 0x1, 0xff, 0xffff) with no higher
   bits set.  */
int
xtensa_mask_immediate (int v)
{
#define MAX_MASK_SIZE 16
  int mask_size;

  for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
    {
      if ((v & 1) == 0)
	return 0;
      v = v >> 1;
      if (v == 0)
	return 1;
    }

  return 0;
}
838 xtensa_mem_offset (v, mode)
839 unsigned v;
840 enum machine_mode mode;
842 switch (mode)
844 case BLKmode:
845 /* Handle the worst case for block moves. See xtensa_expand_block_move
846 where we emit an optimized block move operation if the block can be
847 moved in < "move_ratio" pieces. The worst case is when the block is
848 aligned but has a size of (3 mod 4) (does this happen?) so that the
849 last piece requires a byte load/store. */
850 return (xtensa_uimm8 (v)
851 && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
853 case QImode:
854 return xtensa_uimm8 (v);
856 case HImode:
857 return xtensa_uimm8x2 (v);
859 case DFmode:
860 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
862 default:
863 break;
866 return xtensa_uimm8x4 (v);
870 /* Make normal rtx_code into something we can index from an array */
872 static enum internal_test
873 map_test_to_internal_test (test_code)
874 enum rtx_code test_code;
876 enum internal_test test = ITEST_MAX;
878 switch (test_code)
880 default: break;
881 case EQ: test = ITEST_EQ; break;
882 case NE: test = ITEST_NE; break;
883 case GT: test = ITEST_GT; break;
884 case GE: test = ITEST_GE; break;
885 case LT: test = ITEST_LT; break;
886 case LE: test = ITEST_LE; break;
887 case GTU: test = ITEST_GTU; break;
888 case GEU: test = ITEST_GEU; break;
889 case LTU: test = ITEST_LTU; break;
890 case LEU: test = ITEST_LEU; break;
893 return test;
897 /* Generate the code to compare two integer values. The return value is
898 the comparison expression. */
900 static rtx
901 gen_int_relational (test_code, cmp0, cmp1, p_invert)
902 enum rtx_code test_code; /* relational test (EQ, etc) */
903 rtx cmp0; /* first operand to compare */
904 rtx cmp1; /* second operand to compare */
905 int *p_invert; /* whether branch needs to reverse its test */
907 struct cmp_info {
908 enum rtx_code test_code; /* test code to use in insn */
909 int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
910 int const_add; /* constant to add (convert LE -> LT) */
911 int reverse_regs; /* reverse registers in test */
912 int invert_const; /* != 0 if invert value if cmp1 is constant */
913 int invert_reg; /* != 0 if invert value if cmp1 is register */
914 int unsignedp; /* != 0 for unsigned comparisons. */
917 static struct cmp_info info[ (int)ITEST_MAX ] = {
919 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
920 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
922 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
923 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
924 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
925 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
927 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
928 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
929 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
930 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
933 enum internal_test test;
934 enum machine_mode mode;
935 struct cmp_info *p_info;
937 test = map_test_to_internal_test (test_code);
938 if (test == ITEST_MAX)
939 abort ();
941 p_info = &info[ (int)test ];
943 mode = GET_MODE (cmp0);
944 if (mode == VOIDmode)
945 mode = GET_MODE (cmp1);
947 /* Make sure we can handle any constants given to us. */
948 if (GET_CODE (cmp1) == CONST_INT)
950 HOST_WIDE_INT value = INTVAL (cmp1);
951 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
953 /* if the immediate overflows or does not fit in the immediate field,
954 spill it to a register */
956 if ((p_info->unsignedp ?
957 (uvalue + p_info->const_add > uvalue) :
958 (value + p_info->const_add > value)) != (p_info->const_add > 0))
960 cmp1 = force_reg (mode, cmp1);
962 else if (!(p_info->const_range_p) (value + p_info->const_add))
964 cmp1 = force_reg (mode, cmp1);
967 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
969 cmp1 = force_reg (mode, cmp1);
972 /* See if we need to invert the result. */
973 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
974 ? p_info->invert_const
975 : p_info->invert_reg);
977 /* Comparison to constants, may involve adding 1 to change a LT into LE.
978 Comparison between two registers, may involve switching operands. */
979 if (GET_CODE (cmp1) == CONST_INT)
981 if (p_info->const_add != 0)
982 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
985 else if (p_info->reverse_regs)
987 rtx temp = cmp0;
988 cmp0 = cmp1;
989 cmp1 = temp;
992 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
996 /* Generate the code to compare two float values. The return value is
997 the comparison expression. */
999 static rtx
1000 gen_float_relational (test_code, cmp0, cmp1)
1001 enum rtx_code test_code; /* relational test (EQ, etc) */
1002 rtx cmp0; /* first operand to compare */
1003 rtx cmp1; /* second operand to compare */
1005 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
1006 rtx brtmp;
1007 int reverse_regs, invert;
1009 switch (test_code)
1011 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1012 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1013 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1014 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1015 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1016 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1017 default:
1018 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1019 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
1022 if (reverse_regs)
1024 rtx temp = cmp0;
1025 cmp0 = cmp1;
1026 cmp1 = temp;
1029 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1030 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1032 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
1036 void
1037 xtensa_expand_conditional_branch (operands, test_code)
1038 rtx *operands;
1039 enum rtx_code test_code;
1041 enum cmp_type type = branch_type;
1042 rtx cmp0 = branch_cmp[0];
1043 rtx cmp1 = branch_cmp[1];
1044 rtx cmp;
1045 int invert;
1046 rtx label1, label2;
1048 switch (type)
1050 case CMP_DF:
1051 default:
1052 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1054 case CMP_SI:
1055 invert = FALSE;
1056 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1057 break;
1059 case CMP_SF:
1060 if (!TARGET_HARD_FLOAT)
1061 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1062 invert = FALSE;
1063 cmp = gen_float_relational (test_code, cmp0, cmp1);
1064 break;
1067 /* Generate the branch. */
1069 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1070 label2 = pc_rtx;
1072 if (invert)
1074 label2 = label1;
1075 label1 = pc_rtx;
1078 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1079 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
1080 label1,
1081 label2)));
1085 static rtx
1086 gen_conditional_move (cmp)
1087 rtx cmp;
1089 enum rtx_code code = GET_CODE (cmp);
1090 rtx op0 = branch_cmp[0];
1091 rtx op1 = branch_cmp[1];
1093 if (branch_type == CMP_SI)
1095 /* Jump optimization calls get_condition() which canonicalizes
1096 comparisons like (GE x <const>) to (GT x <const-1>).
1097 Transform those comparisons back to GE, since that is the
1098 comparison supported in Xtensa. We shouldn't have to
1099 transform <LE x const> comparisons, because neither
1100 xtensa_expand_conditional_branch() nor get_condition() will
1101 produce them. */
1103 if ((code == GT) && (op1 == constm1_rtx))
1105 code = GE;
1106 op1 = const0_rtx;
1108 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1110 if (boolean_operator (cmp, VOIDmode))
1112 /* swap the operands to make const0 second */
1113 if (op0 == const0_rtx)
1115 op0 = op1;
1116 op1 = const0_rtx;
1119 /* if not comparing against zero, emit a comparison (subtract) */
1120 if (op1 != const0_rtx)
1122 op0 = expand_binop (SImode, sub_optab, op0, op1,
1123 0, 0, OPTAB_LIB_WIDEN);
1124 op1 = const0_rtx;
1127 else if (branch_operator (cmp, VOIDmode))
1129 /* swap the operands to make const0 second */
1130 if (op0 == const0_rtx)
1132 op0 = op1;
1133 op1 = const0_rtx;
1135 switch (code)
1137 case LT: code = GE; break;
1138 case GE: code = LT; break;
1139 default: abort ();
1143 if (op1 != const0_rtx)
1144 return 0;
1146 else
1147 return 0;
1149 return gen_rtx (code, VOIDmode, op0, op1);
1152 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1153 return gen_float_relational (code, op0, op1);
1155 return 0;
1160 xtensa_expand_conditional_move (operands, isflt)
1161 rtx *operands;
1162 int isflt;
1164 rtx cmp;
1165 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1167 if (!(cmp = gen_conditional_move (operands[1])))
1168 return 0;
1170 if (isflt)
1171 gen_fn = (branch_type == CMP_SI
1172 ? gen_movsfcc_internal0
1173 : gen_movsfcc_internal1);
1174 else
1175 gen_fn = (branch_type == CMP_SI
1176 ? gen_movsicc_internal0
1177 : gen_movsicc_internal1);
1179 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1180 operands[2], operands[3], cmp));
1181 return 1;
1186 xtensa_expand_scc (operands)
1187 rtx *operands;
1189 rtx dest = operands[0];
1190 rtx cmp = operands[1];
1191 rtx one_tmp, zero_tmp;
1192 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1194 if (!(cmp = gen_conditional_move (cmp)))
1195 return 0;
1197 one_tmp = gen_reg_rtx (SImode);
1198 zero_tmp = gen_reg_rtx (SImode);
1199 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1200 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1202 gen_fn = (branch_type == CMP_SI
1203 ? gen_movsicc_internal0
1204 : gen_movsicc_internal1);
1205 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1206 return 1;
1210 /* Emit insns to move operands[1] into operands[0].
1211 Return 1 if we have written out everything that needs to be done to
1212 do the move. Otherwise, return 0 and the caller will emit the move
1213 normally. */
1216 xtensa_emit_move_sequence (operands, mode)
1217 rtx *operands;
1218 enum machine_mode mode;
1220 if (CONSTANT_P (operands[1])
1221 && GET_CODE (operands[1]) != CONSTANT_P_RTX
1222 && (GET_CODE (operands[1]) != CONST_INT
1223 || !xtensa_simm12b (INTVAL (operands[1]))))
1225 if (!TARGET_CONST16)
1226 operands[1] = force_const_mem (SImode, operands[1]);
1228 /* PC-relative loads are always SImode, and CONST16 is only
1229 supported in the movsi pattern, so add a SUBREG for any other
1230 (smaller) mode. */
1232 if (mode != SImode)
1234 if (register_operand (operands[0], mode))
1236 operands[0] = simplify_gen_subreg (SImode, operands[0], mode, 0);
1237 emit_move_insn (operands[0], operands[1]);
1238 return 1;
1240 else
1242 operands[1] = force_reg (SImode, operands[1]);
1243 operands[1] = gen_lowpart_SUBREG (mode, operands[1]);
1248 if (!(reload_in_progress | reload_completed))
1250 if (!xtensa_valid_move (mode, operands))
1251 operands[1] = force_reg (mode, operands[1]);
1253 if (xtensa_copy_incoming_a7 (operands, mode))
1254 return 1;
1257 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1258 instruction won't be recognized after reload, so we remove the
1259 subreg and adjust mem accordingly. */
1260 if (reload_in_progress)
1262 operands[0] = fixup_subreg_mem (operands[0]);
1263 operands[1] = fixup_subreg_mem (operands[1]);
1265 return 0;
1269 static rtx
1270 fixup_subreg_mem (x)
1271 rtx x;
1273 if (GET_CODE (x) == SUBREG
1274 && GET_CODE (SUBREG_REG (x)) == REG
1275 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1277 rtx temp =
1278 gen_rtx_SUBREG (GET_MODE (x),
1279 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1280 SUBREG_BYTE (x));
1281 x = alter_subreg (&temp);
1283 return x;
1287 /* Check if this move is copying an incoming argument in a7. If so,
1288 emit the move, followed by the special "set_frame_ptr"
1289 unspec_volatile insn, at the very beginning of the function. This
1290 is necessary because the register allocator will ignore conflicts
1291 with a7 and may assign some other pseudo to a7. If that pseudo was
1292 assigned prior to this move, it would clobber the incoming argument
1293 in a7. By copying the argument out of a7 as the very first thing,
1294 and then immediately following that with an unspec_volatile to keep
1295 the scheduler away, we should avoid any problems. */
1297 bool
1298 xtensa_copy_incoming_a7 (operands, mode)
1299 rtx *operands;
1300 enum machine_mode mode;
1302 if (a7_overlap_mentioned_p (operands[1])
1303 && !cfun->machine->incoming_a7_copied)
1305 rtx mov;
1306 switch (mode)
1308 case DFmode:
1309 mov = gen_movdf_internal (operands[0], operands[1]);
1310 break;
1311 case SFmode:
1312 mov = gen_movsf_internal (operands[0], operands[1]);
1313 break;
1314 case DImode:
1315 mov = gen_movdi_internal (operands[0], operands[1]);
1316 break;
1317 case SImode:
1318 mov = gen_movsi_internal (operands[0], operands[1]);
1319 break;
1320 case HImode:
1321 mov = gen_movhi_internal (operands[0], operands[1]);
1322 break;
1323 case QImode:
1324 mov = gen_movqi_internal (operands[0], operands[1]);
1325 break;
1326 default:
1327 abort ();
1330 /* Insert the instructions before any other argument copies.
1331 (The set_frame_ptr insn comes _after_ the move, so push it
1332 out first.) */
1333 push_topmost_sequence ();
1334 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1335 emit_insn_after (mov, get_insns ());
1336 pop_topmost_sequence ();
1338 /* Ideally the incoming argument in a7 would only be copied
1339 once, since propagating a7 into the body of a function
1340 will almost certainly lead to errors. However, there is
1341 at least one harmless case (in GCSE) where the original
1342 copy from a7 is changed to copy into a new pseudo. Thus,
1343 we use a flag to only do this special treatment for the
1344 first copy of a7. */
1346 cfun->machine->incoming_a7_copied = true;
1348 return 1;
1351 return 0;
1355 /* Try to expand a block move operation to an RTL block move instruction.
1356 If not optimizing or if the block size is not a constant or if the
1357 block is small, the expansion fails and GCC falls back to calling
1358 memcpy().
1360 operands[0] is the destination
1361 operands[1] is the source
1362 operands[2] is the length
1363 operands[3] is the alignment */
/* NOTE(review): the return-type line ("int") appears to have been lost
   in extraction; the function returns 1 on successful expansion and 0
   to fall back to a library call.  */
1366 xtensa_expand_block_move (operands)
1367 rtx *operands;
1369 rtx dest = operands[0];
1370 rtx src = operands[1];
1371 int bytes = INTVAL (operands[2]);
1372 int align = XINT (operands[3], 0);
1373 int num_pieces, move_ratio;
1375 /* If this is not a fixed size move, just call memcpy */
1376 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1377 return 0;
1379 /* Anything to move? */
1380 if (bytes <= 0)
1381 return 1;
1383 if (align > MOVE_MAX)
1384 align = MOVE_MAX;
1386 /* decide whether to expand inline based on the optimization level */
1387 move_ratio = 4;
1388 if (optimize > 2)
1389 move_ratio = LARGEST_MOVE_RATIO;
/* Rough piece count: full aligned chunks plus leftover bytes.  */
1390 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1391 if (num_pieces >= move_ratio)
1392 return 0;
1394 /* make sure the memory addresses are valid */
1395 operands[0] = validize_mem (dest);
1396 operands[1] = validize_mem (src);
1398 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1399 operands[2], operands[3]));
1400 return 1;
1404 /* Emit a sequence of instructions to implement a block move, trying
1405 to hide load delay slots as much as possible. Load N values into
1406 temporary registers, store those N values, and repeat until the
1407 complete block has been moved. N=delay_slots+1 */
/* One queued load or store: the asm template string (with %0/%1
   placeholders) and its two operands (register and memory).  */
1409 struct meminsnbuf {
1410 char template[30];
1411 rtx operands[2];
/* Output assembly for a block move of OPERANDS (dest, src, length,
   alignment), batching DELAY_SLOTS+1 loads into TMPREGS before the
   matching stores so load delay slots overlap.  Called at final
   assembly output time (uses output_asm_insn, not emit_insn).  */
1414 void
1415 xtensa_emit_block_move (operands, tmpregs, delay_slots)
1416 rtx *operands;
1417 rtx *tmpregs;
1418 int delay_slots;
1420 rtx dest = operands[0];
1421 rtx src = operands[1];
1422 int bytes = INTVAL (operands[2]);
1423 int align = XINT (operands[3], 0);
1424 rtx from_addr = XEXP (src, 0);
1425 rtx to_addr = XEXP (dest, 0);
1426 int from_struct = MEM_IN_STRUCT_P (src);
1427 int to_struct = MEM_IN_STRUCT_P (dest);
1428 int offset = 0;
1429 int chunk_size, item_size;
1430 struct meminsnbuf *ldinsns, *stinsns;
1431 const char *ldname, *stname;
1432 enum machine_mode mode;
1434 if (align > MOVE_MAX)
1435 align = MOVE_MAX;
1436 item_size = align;
1437 chunk_size = delay_slots + 1;
/* Per-chunk scratch buffers; alloca is safe here because chunk_size
   is small (delay_slots + 1).  */
1439 ldinsns = (struct meminsnbuf *)
1440 alloca (chunk_size * sizeof (struct meminsnbuf));
1441 stinsns = (struct meminsnbuf *)
1442 alloca (chunk_size * sizeof (struct meminsnbuf));
1444 mode = xtensa_find_mode_for_size (item_size);
1445 item_size = GET_MODE_SIZE (mode);
1446 ldname = xtensa_ld_opcodes[(int) mode];
1447 stname = xtensa_st_opcodes[(int) mode];
1449 while (bytes > 0)
1451 int n;
/* Queue up to chunk_size load/store pairs for this batch.  */
1453 for (n = 0; n < chunk_size; n++)
1455 rtx addr, mem;
1457 if (bytes == 0)
1459 chunk_size = n;
1460 break;
1463 if (bytes < item_size)
1465 /* find a smaller item_size which we can load & store */
1466 item_size = bytes;
1467 mode = xtensa_find_mode_for_size (item_size);
1468 item_size = GET_MODE_SIZE (mode);
1469 ldname = xtensa_ld_opcodes[(int) mode];
1470 stname = xtensa_st_opcodes[(int) mode];
1473 /* record the load instruction opcode and operands */
1474 addr = plus_constant (from_addr, offset);
1475 mem = gen_rtx_MEM (mode, addr);
1476 if (! memory_address_p (mode, addr))
1477 abort ();
1478 MEM_IN_STRUCT_P (mem) = from_struct;
1479 ldinsns[n].operands[0] = tmpregs[n];
1480 ldinsns[n].operands[1] = mem;
1481 sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1483 /* record the store instruction opcode and operands */
1484 addr = plus_constant (to_addr, offset);
1485 mem = gen_rtx_MEM (mode, addr);
1486 if (! memory_address_p (mode, addr))
1487 abort ();
1488 MEM_IN_STRUCT_P (mem) = to_struct;
1489 stinsns[n].operands[0] = tmpregs[n];
1490 stinsns[n].operands[1] = mem;
1491 sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1493 offset += item_size;
1494 bytes -= item_size;
1497 /* now output the loads followed by the stores */
1498 for (n = 0; n < chunk_size; n++)
1499 output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1500 for (n = 0; n < chunk_size; n++)
1501 output_asm_insn (stinsns[n].template, stinsns[n].operands);
/* Return the widest integer mode no larger than ITEM_SIZE bytes for
   which both a load and a store opcode are available (see the
   xtensa_ld_opcodes/xtensa_st_opcodes tables set up in
   override_options).  Aborts if no integer mode fits.  */
1506 static enum machine_mode
1507 xtensa_find_mode_for_size (item_size)
1508 unsigned item_size;
1510 enum machine_mode mode, tmode;
1512 while (1)
1514 mode = VOIDmode;
1516 /* find mode closest to but not bigger than item_size */
1517 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1518 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1519 if (GET_MODE_SIZE (tmode) <= item_size)
1520 mode = tmode;
1521 if (mode == VOIDmode)
1522 abort ();
1524 item_size = GET_MODE_SIZE (mode);
1526 if (xtensa_ld_opcodes[(int) mode]
1527 && xtensa_st_opcodes[(int) mode])
1528 break;
1530 /* cannot load & store this mode; try something smaller */
1531 item_size -= 1;
1534 return mode;
/* Expand a nonlocal goto.  OPERANDS[1] is the handler label and
   OPERANDS[3] the frame pointer of the containing function; the heavy
   lifting is delegated to the libgcc helper __xtensa_nonlocal_goto.  */
1538 void
1539 xtensa_expand_nonlocal_goto (operands)
1540 rtx *operands;
1542 rtx goto_handler = operands[1];
1543 rtx containing_fp = operands[3];
1545 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1546 is too big to generate in-line */
1548 if (GET_CODE (containing_fp) != REG)
1549 containing_fp = force_reg (Pmode, containing_fp);
/* Rewrite virtual-frame references in the handler address so they are
   relative to the containing function's frame.  */
1551 goto_handler = replace_rtx (copy_rtx (goto_handler),
1552 virtual_stack_vars_rtx,
1553 containing_fp);
1555 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1556 0, VOIDmode, 2,
1557 containing_fp, Pmode,
1558 goto_handler, Pmode);
/* Allocate (zeroed, GC-managed) the per-function machine-specific
   state; installed as init_machine_status in override_options.  */
1562 static struct machine_function *
1563 xtensa_init_machine_status ()
1565 return ggc_alloc_cleared (sizeof (struct machine_function));
/* Called when __builtin_frame_address/__builtin_return_address is used;
   forces a frame pointer and spills the register windows so outer
   frames are visible in memory.  */
1569 void
1570 xtensa_setup_frame_addresses ()
1572 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1573 cfun->machine->accesses_prev_frame = 1;
1575 emit_library_call
1576 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1577 0, VOIDmode, 0);
1581 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1582 a comment showing where the end of the loop is. However, if there is a
1583 label or a branch at the end of the loop then we need to place a nop
1584 there. If the loop ends with a label we need the nop so that branches
1585 targetting that label will target the nop (and thus remain in the loop),
1586 instead of targetting the instruction after the loop (and thus exiting
1587 the loop). If the loop ends with a branch, we need the nop in case the
1588 branch is targetting a location inside the loop. When the branch
1589 executes it will cause the loop count to be decremented even if it is
1590 taken (because it is the last instruction in the loop), so we need to
1591 nop after the branch to prevent the loop count from being decremented
1592 when the branch is taken. */
1594 void
1595 xtensa_emit_loop_end (insn, operands)
1596 rtx insn;
1597 rtx *operands;
1599 char done = 0;
1601 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1603 switch (GET_CODE (insn))
1605 case NOTE:
1606 case BARRIER:
1607 break;
1609 case CODE_LABEL:
1610 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1611 done = 1;
1612 break;
1614 default:
1616 rtx body = PATTERN (insn);
1618 if (GET_CODE (body) == JUMP_INSN)
1620 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1621 done = 1;
1623 else if ((GET_CODE (body) != USE)
1624 && (GET_CODE (body) != CLOBBER))
1625 done = 1;
1627 break;
1631 output_asm_insn ("# loop end for %0", operands);
/* Return the asm template for a call insn.  CALLOP indexes the call
   target within OPERANDS; a register target uses "callx8", anything
   else (symbol or immediate) uses "call8".  Returns a pointer to a
   static buffer, so the result must be consumed before the next call.  */
1635 char *
1636 xtensa_emit_call (callop, operands)
1637 int callop;
1638 rtx *operands;
1640 static char result[64];
1641 rtx tgt = operands[callop];
1643 if (GET_CODE (tgt) == CONST_INT)
1644 sprintf (result, "call8\t0x%lx", INTVAL (tgt));
1645 else if (register_operand (tgt, VOIDmode))
1646 sprintf (result, "callx8\t%%%d", callop);
1647 else
1648 sprintf (result, "call8\t%%%d", callop);
1650 return result;
1654 /* Return the stabs register number to use for 'regno'. */
/* NOTE(review): the return-type line ("int") appears to have been lost
   in extraction.  GP registers map to 0.., boolean registers to 16..,
   FP registers to 256.. (a guess, per the comment below), and the MAC16
   accumulator maps back to 0 via the regno = -1 trick.  */
1657 xtensa_dbx_register_number (regno)
1658 int regno;
1660 int first = -1;
1662 if (GP_REG_P (regno)) {
1663 regno -= GP_REG_FIRST;
1664 first = 0;
1666 else if (BR_REG_P (regno)) {
1667 regno -= BR_REG_FIRST;
1668 first = 16;
1670 else if (FP_REG_P (regno)) {
1671 regno -= FP_REG_FIRST;
1672 /* The current numbering convention is that TIE registers are
1673 numbered in libcc order beginning with 256. We can't guarantee
1674 that the FP registers will come first, so the following is just
1675 a guess. It seems like we should make a special case for FP
1676 registers and give them fixed numbers < 256. */
1677 first = 256;
1679 else if (ACC_REG_P (regno))
1681 first = 0;
1682 regno = -1;
1685 /* When optimizing, we sometimes get asked about pseudo-registers
1686 that don't represent hard registers. Return 0 for these. */
1687 if (first == -1)
1688 return 0;
1690 return first + regno;
1694 /* Argument support functions. */
1696 /* Initialize CUMULATIVE_ARGS for a function. */
1698 void
1699 init_cumulative_args (cum, fntype, libname)
1700 CUMULATIVE_ARGS *cum; /* argument info to initialize */
1701 tree fntype ATTRIBUTE_UNUSED; /* tree ptr for function decl */
1702 rtx libname ATTRIBUTE_UNUSED; /* SYMBOL_REF of library name or 0 */
/* Argument scanning starts at word 0; only the word counter is kept.  */
1704 cum->arg_words = 0;
1707 /* Advance the argument to the next argument position. */
1709 void
1710 function_arg_advance (cum, mode, type)
1711 CUMULATIVE_ARGS *cum; /* current arg information */
1712 enum machine_mode mode; /* current arg mode */
1713 tree type; /* type of the argument or 0 if lib support */
1715 int words, max;
1716 int *arg_words;
1718 arg_words = &cum->arg_words;
1719 max = MAX_ARGS_IN_REGISTERS;
/* Size of this argument, rounded up to whole words.  */
1721 words = (((mode != BLKmode)
1722 ? (int) GET_MODE_SIZE (mode)
1723 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* An argument never straddles the register/stack boundary: if it would
   not fit entirely in registers, push it wholly onto the stack.  */
1725 if ((*arg_words + words > max) && (*arg_words < max))
1726 *arg_words = max;
1728 *arg_words += words;
1732 /* Return an RTL expression containing the register for the given mode,
1733 or 0 if the argument is to be passed on the stack. */
/* NOTE(review): the return-type line ("rtx") appears to have been lost
   in extraction.  INCOMING_P selects callee-side (GP_ARG_FIRST) vs
   caller-side (GP_OUTGOING_ARG_FIRST) register numbering.  */
1736 function_arg (cum, mode, type, incoming_p)
1737 CUMULATIVE_ARGS *cum; /* current arg information */
1738 enum machine_mode mode; /* current arg mode */
1739 tree type; /* type of the argument or 0 if lib support */
1740 int incoming_p; /* computing the incoming registers? */
1742 int regbase, words, max;
1743 int *arg_words;
1744 int regno;
1745 enum machine_mode result_mode;
1747 arg_words = &cum->arg_words;
1748 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1749 max = MAX_ARGS_IN_REGISTERS;
1751 words = (((mode != BLKmode)
1752 ? (int) GET_MODE_SIZE (mode)
1753 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* Double-word-aligned types start on an even word boundary.  */
1755 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1756 *arg_words += (*arg_words & 1);
1758 if (*arg_words + words > max)
1759 return (rtx)0;
1761 regno = regbase + *arg_words;
1762 result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);
1764 /* We need to make sure that references to a7 are represented with
1765 rtx that is not equal to hard_frame_pointer_rtx. For BLKmode and
1766 modes bigger than 2 words (because we only have patterns for
1767 modes of 2 words or smaller), we can't control the expansion
1768 unless we explicitly list the individual registers in a PARALLEL. */
1770 if ((mode == BLKmode || words > 2)
1771 && regno < A7_REG
1772 && regno + words > A7_REG)
1774 rtx result;
1775 int n;
1777 result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
1778 for (n = 0; n < words; n++)
1780 XVECEXP (result, 0, n) =
1781 gen_rtx_EXPR_LIST (VOIDmode,
1782 gen_raw_REG (SImode, regno + n),
1783 GEN_INT (n * UNITS_PER_WORD));
1785 return result;
/* gen_raw_REG gives a REG distinct from hard_frame_pointer_rtx even
   when regno == A7_REG.  */
1788 return gen_raw_REG (result_mode, regno);
/* Process target options after command-line parsing: validate option
   combinations, fill the load/store opcode tables and the constraint
   letter -> register class map, compute HARD_REGNO_MODE_OK data, and
   install the per-function machine-status allocator.  */
1792 void
1793 override_options ()
1795 int regno;
1796 enum machine_mode mode;
1798 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1799 error ("boolean registers required for the floating-point option");
1801 /* set up the tables of ld/st opcode names for block moves */
1802 xtensa_ld_opcodes[(int) SImode] = "l32i";
1803 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1804 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1805 xtensa_st_opcodes[(int) SImode] = "s32i";
1806 xtensa_st_opcodes[(int) HImode] = "s16i";
1807 xtensa_st_opcodes[(int) QImode] = "s8i";
/* Map constraint letters to register classes; optional-feature letters
   collapse to NO_REGS when the feature is disabled.  */
1809 xtensa_char_to_class['q'] = SP_REG;
1810 xtensa_char_to_class['a'] = GR_REGS;
1811 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1812 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1813 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1814 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1815 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1816 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1817 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1818 xtensa_char_to_class['W'] = ((TARGET_CONST16) ? GR_REGS: NO_REGS);
1820 /* Set up array giving whether a given register can hold a given mode. */
1821 for (mode = VOIDmode;
1822 mode != MAX_MACHINE_MODE;
1823 mode = (enum machine_mode) ((int) mode + 1))
1825 int size = GET_MODE_SIZE (mode);
1826 enum mode_class class = GET_MODE_CLASS (mode);
1828 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1830 int temp;
1832 if (ACC_REG_P (regno))
1833 temp = (TARGET_MAC16
1834 && (class == MODE_INT) && (size <= UNITS_PER_WORD));
1835 else if (GP_REG_P (regno))
1836 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1837 else if (FP_REG_P (regno))
1838 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1839 else if (BR_REG_P (regno))
1840 temp = (TARGET_BOOLEANS && (mode == CCmode));
1841 else
1842 temp = FALSE;
1844 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1848 init_machine_status = xtensa_init_machine_status;
1850 /* Check PIC settings. PIC is only supported when using L32R
1851 instructions, and some targets need to always use PIC. */
1852 if (flag_pic && TARGET_CONST16)
1853 error ("-f%s is not supported with CONST16 instructions",
1854 (flag_pic > 1 ? "PIC" : "pic"))&#x3B;
1855 else if (XTENSA_ALWAYS_PIC)
1857 if (TARGET_CONST16)
1858 error ("PIC is required but not supported with CONST16 instructions");
1859 flag_pic = 1;
1861 /* There's no need for -fPIC (as opposed to -fpic) on Xtensa. */
1862 if (flag_pic > 1)
1863 flag_pic = 1;
1867 /* A C compound statement to output to stdio stream STREAM the
1868 assembler syntax for an instruction operand X. X is an RTL
1869 expression.
1871 CODE is a value that can be used to specify one of several ways
1872 of printing the operand. It is used when identical operands
1873 must be printed differently depending on the context. CODE
1874 comes from the '%' specification that was used to request
1875 printing of the operand. If the specification was just '%DIGIT'
1876 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1877 is the ASCII code for LTR.
1879 If X is a register, this macro should print the register's name.
1880 The names can be found in an array 'reg_names' whose type is
1881 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1883 When the machine description has a specification '%PUNCT' (a '%'
1884 followed by a punctuation character), this macro is called with
1885 a null pointer for X and the punctuation character for CODE.
1887 'a', 'c', 'l', and 'n' are reserved.
1889 The Xtensa specific codes are:
1891 'd' CONST_INT, print as signed decimal
1892 'x' CONST_INT, print as signed hexadecimal
1893 'K' CONST_INT, print number of bits in mask for EXTUI
1894 'R' CONST_INT, print (X & 0x1f)
1895 'L' CONST_INT, print ((32 - X) & 0x1f)
1896 'D' REG, print second register of double-word register operand
1897 'N' MEM, print address of next word following a memory operand
1898 'v' MEM, if memory reference is volatile, output a MEMW before it
1899 't' any constant, add "@h" suffix for top 16 bits
1900 'b' any constant, add "@l" suffix for bottom 16 bits
/* Print VAL to FILE as a "nice" hexadecimal constant: small magnitudes
   (|VAL| < 10) in decimal, negative values as -0xNN, and everything
   else as 0xNN.  */
static void
printx (file, val)
     FILE *file;
     signed int val;
{
  /* print a hexadecimal value in a nice way */
  if ((val > -0xa) && (val < 0xa))
    fprintf (file, "%d", val);
  else if (val < 0)
    /* Negate in unsigned arithmetic: "-val" overflows (undefined
       behavior) when val == INT_MIN, and %x expects an unsigned int.  */
    fprintf (file, "-0x%x", (unsigned int) - (unsigned int) val);
  else
    fprintf (file, "0x%x", (unsigned int) val);
}
/* Implement PRINT_OPERAND: write operand X to FILE, formatted according
   to the operand-code LETTER (the codes are documented in the comment
   block preceding this function).  LETTER == 0 means default printing:
   register name, memory address, decimal constant, or addr_const.  */
1918 void
1919 print_operand (file, x, letter)
1920 FILE *file; /* file to write to */
1921 rtx x; /* operand to print */
1922 int letter; /* %<letter> or 0 */
1924 if (!x)
1925 error ("PRINT_OPERAND null pointer");
1927 switch (letter)
1929 case 'D':
/* Second register of a double-word register operand.  */
1930 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
1931 fprintf (file, "%s", reg_names[xt_true_regnum (x) + 1]);
1932 else
1933 output_operand_lossage ("invalid %%D value");
1934 break;
1936 case 'v':
1937 if (GET_CODE (x) == MEM)
1939 /* For a volatile memory reference, emit a MEMW before the
1940 load or store. */
1941 if (MEM_VOLATILE_P (x) && TARGET_SERIALIZE_VOLATILE)
1942 fprintf (file, "memw\n\t");
1944 else
1945 output_operand_lossage ("invalid %%v value");
1946 break;
1948 case 'N':
/* Address of the second word of a double-word memory operand.  */
1949 if (GET_CODE (x) == MEM
1950 && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
1952 x = adjust_address (x, GET_MODE (x) == DFmode ? SFmode : SImode, 4);
1953 output_address (XEXP (x, 0));
1955 else
1956 output_operand_lossage ("invalid %%N value");
1957 break;
1959 case 'K':
/* Width of a low-order contiguous mask, for EXTUI; the mask must be
   exactly 1..16 consecutive low bits.  */
1960 if (GET_CODE (x) == CONST_INT)
1962 int num_bits = 0;
1963 unsigned val = INTVAL (x);
1964 while (val & 1)
1966 num_bits += 1;
1967 val = val >> 1;
1969 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
1970 fatal_insn ("invalid mask", x);
1972 fprintf (file, "%d", num_bits);
1974 else
1975 output_operand_lossage ("invalid %%K value");
1976 break;
1978 case 'L':
1979 if (GET_CODE (x) == CONST_INT)
1980 fprintf (file, "%ld", (32 - INTVAL (x)) & 0x1f);
1981 else
1982 output_operand_lossage ("invalid %%L value");
1983 break;
1985 case 'R':
1986 if (GET_CODE (x) == CONST_INT)
1987 fprintf (file, "%ld", INTVAL (x) & 0x1f);
1988 else
1989 output_operand_lossage ("invalid %%R value");
1990 break;
1992 case 'x':
1993 if (GET_CODE (x) == CONST_INT)
1994 printx (file, INTVAL (x));
1995 else
1996 output_operand_lossage ("invalid %%x value");
1997 break;
1999 case 'd':
2000 if (GET_CODE (x) == CONST_INT)
2001 fprintf (file, "%ld", INTVAL (x));
2002 else
2003 output_operand_lossage ("invalid %%d value");
2004 break;
2006 case 't':
2007 case 'b':
/* High/low 16-bit halves of a constant, for CONST16 sequences.  */
2008 if (GET_CODE (x) == CONST_INT)
2010 printx (file, INTVAL (x));
2011 fputs (letter == 't' ? "@h" : "@l", file);
2013 else if (GET_CODE (x) == CONST_DOUBLE)
2015 REAL_VALUE_TYPE r;
2016 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2017 if (GET_MODE (x) == SFmode)
2019 long l;
2020 REAL_VALUE_TO_TARGET_SINGLE (r, l);
2021 fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
2023 else
2024 output_operand_lossage ("invalid %%t/%%b value");
2026 else if (GET_CODE (x) == CONST)
2028 /* X must be a symbolic constant on ELF. Write an expression
2029 suitable for 'const16' that sets the high or low 16 bits. */
2030 if (GET_CODE (XEXP (x, 0)) != PLUS
2031 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
2032 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
2033 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
2034 output_operand_lossage ("invalid %%t/%%b value");
2035 print_operand (file, XEXP (XEXP (x, 0), 0), 0);
2036 fputs (letter == 't' ? "@h" : "@l", file);
2037 /* There must be a non-alphanumeric character between 'h' or 'l'
2038 and the number. The '-' is added by print_operand() already. */
2039 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
2040 fputs ("+", file);
2041 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
2043 else
2045 output_addr_const (file, x);
2046 fputs (letter == 't' ? "@h" : "@l", file);
2048 break;
2050 default:
2051 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2052 fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
2053 else if (GET_CODE (x) == MEM)
2054 output_address (XEXP (x, 0));
2055 else if (GET_CODE (x) == CONST_INT)
2056 fprintf (file, "%ld", INTVAL (x));
2057 else
2058 output_addr_const (file, x);
2063 /* A C compound statement to output to stdio stream STREAM the
2064 assembler syntax for an instruction operand that is a memory
2065 reference whose address is ADDR. ADDR is an RTL expression. */
2067 void
2068 print_operand_address (file, addr)
2069 FILE *file;
2070 rtx addr;
2072 if (!addr)
2073 error ("PRINT_OPERAND_ADDRESS, null pointer")&#x3B;
2075 switch (GET_CODE (addr))
2077 default:
2078 fatal_insn ("invalid address", addr);
2079 break;
2081 case REG:
/* Xtensa load/store syntax is "reg, offset"; a bare register gets
   an explicit zero offset.  */
2082 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2083 break;
2085 case PLUS:
2087 rtx reg = (rtx)0;
2088 rtx offset = (rtx)0;
2089 rtx arg0 = XEXP (addr, 0);
2090 rtx arg1 = XEXP (addr, 1);
/* Accept the register on either side of the PLUS.  */
2092 if (GET_CODE (arg0) == REG)
2094 reg = arg0;
2095 offset = arg1;
2097 else if (GET_CODE (arg1) == REG)
2099 reg = arg1;
2100 offset = arg0;
2102 else
2103 fatal_insn ("no register in address", addr);
2105 if (CONSTANT_P (offset))
2107 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2108 output_addr_const (file, offset);
2110 else
2111 fatal_insn ("address offset not a constant", addr);
2113 break;
2115 case LABEL_REF:
2116 case SYMBOL_REF:
2117 case CONST_INT:
2118 case CONST:
2119 output_addr_const (file, addr);
2120 break;
/* Emit a ".literal" directive for constant X of the given MODE into
   the literal pool, labeled .LC<LABELNO>.  Handles SF/DF floats and
   4-/8-byte integer constants; anything else aborts.  */
2125 void
2126 xtensa_output_literal (file, x, mode, labelno)
2127 FILE *file;
2128 rtx x;
2129 enum machine_mode mode;
2130 int labelno;
2132 long value_long[2];
2133 REAL_VALUE_TYPE r;
2134 int size;
2136 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2138 switch (GET_MODE_CLASS (mode))
2140 case MODE_FLOAT:
2141 if (GET_CODE (x) != CONST_DOUBLE)
2142 abort ();
2144 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2145 switch (mode)
2147 case SFmode:
2148 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2149 fprintf (file, "0x%08lx\n", value_long[0]);
2150 break;
2152 case DFmode:
2153 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2154 fprintf (file, "0x%08lx, 0x%08lx\n",
2155 value_long[0], value_long[1]);
2156 break;
2158 default:
2159 abort ();
2162 break;
2164 case MODE_INT:
2165 case MODE_PARTIAL_INT:
2166 size = GET_MODE_SIZE (mode);
2167 if (size == 4)
2169 output_addr_const (file, x);
2170 fputs ("\n", file);
2172 else if (size == 8)
/* Emit the two words of a DImode constant as "lo, hi" (word order
   per operand_subword).  */
2174 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2175 fputs (", ", file);
2176 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2177 fputs ("\n", file);
2179 else
2180 abort ();
2181 break;
2183 default:
2184 abort ();
2189 /* Return the bytes needed to compute the frame pointer from the current
2190 stack pointer. */
2192 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2193 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
/* Compute (and cache in xtensa_current_frame_size) the total frame
   size: SIZE bytes of locals, optional static-chain slot, outgoing
   argument area, and the register-window save area, rounded up to the
   stack alignment.  */
2195 long
2196 compute_frame_size (size)
2197 int size; /* # of var. bytes allocated */
2199 /* add space for the incoming static chain value */
2200 if (current_function_needs_context)
2201 size += (1 * UNITS_PER_WORD);
2203 xtensa_current_frame_size =
2204 XTENSA_STACK_ALIGN (size
2205 + current_function_outgoing_args_size
2206 + (WINDOW_SIZE * UNITS_PER_WORD));
2207 return xtensa_current_frame_size;
/* Return nonzero when a frame pointer must be kept.  NOTE(review): the
   return-type line ("int") appears to have been lost in extraction.  */
2212 xtensa_frame_pointer_required ()
2214 /* The code to expand builtin_frame_addr and builtin_return_addr
2215 currently uses the hard_frame_pointer instead of frame_pointer.
2216 This seems wrong but maybe it's necessary for other architectures.
2217 This function is derived from the i386 code. */
2219 if (cfun->machine->accesses_prev_frame)
2220 return 1;
2222 return 0;
/* Emit the function prologue: an ENTRY instruction sized to the frame
   (with an explicit stack adjustment for frames too large for ENTRY's
   12-bit immediate), then set up the hard frame pointer if needed.  */
2226 void
2227 xtensa_expand_prologue ()
2229 HOST_WIDE_INT total_size;
2230 rtx size_rtx;
2232 total_size = compute_frame_size (get_frame_size ());
2233 size_rtx = GEN_INT (total_size);
/* ENTRY's immediate is 12 bits, shifted left by 3.  */
2235 if (total_size < (1 << (12+3)))
2236 emit_insn (gen_entry (size_rtx, size_rtx));
2237 else
2239 /* Use a8 as a temporary since a0-a7 may be live. */
2240 rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
2241 emit_insn (gen_entry (size_rtx, GEN_INT (MIN_FRAME_SIZE)));
2242 emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
2243 emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
2244 emit_move_insn (stack_pointer_rtx, tmp_reg);
2247 if (frame_pointer_needed)
2249 rtx first, insn, set_frame_ptr_insn = 0;
2251 push_topmost_sequence ();
2252 first = get_insns ();
2253 pop_topmost_sequence ();
2255 /* Search all instructions, looking for the insn that sets up the
2256 frame pointer. This search will fail if the function does not
2257 have an incoming argument in $a7, but in that case, we can just
2258 set up the frame pointer at the very beginning of the
2259 function. */
2261 for (insn = first; insn; insn = NEXT_INSN (insn))
2263 rtx pat;
2265 if (!INSN_P (insn))
2266 continue;
2268 pat = PATTERN (insn);
2269 if (GET_CODE (pat) == SET
2270 && GET_CODE (SET_SRC (pat)) == UNSPEC_VOLATILE
2271 && (XINT (SET_SRC (pat), 1) == UNSPECV_SET_FP))
2273 set_frame_ptr_insn = insn;
2274 break;
2278 if (set_frame_ptr_insn)
2280 /* For all instructions prior to set_frame_ptr_insn, replace
2281 hard_frame_pointer references with stack_pointer. */
2282 for (insn = first;
2283 insn != set_frame_ptr_insn;
2284 insn = NEXT_INSN (insn))
2286 if (INSN_P (insn))
2287 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2288 hard_frame_pointer_rtx,
2289 stack_pointer_rtx);
2292 else
2293 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
2298 /* Clear variables at function end. */
2300 void
2301 xtensa_function_epilogue (file, size)
2302 FILE *file ATTRIBUTE_UNUSED;
2303 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
/* Reset the cached frame size so it is recomputed for the next
   function.  */
2305 xtensa_current_frame_size = 0;
/* Return an rtx for the return address COUNT frames up (COUNT == -1
   means the current return register a0).  NOTE(review): the return-type
   line ("rtx") appears to have been lost in extraction.  */
2310 xtensa_return_addr (count, frame)
2311 int count;
2312 rtx frame;
2314 rtx result, retaddr;
2316 if (count == -1)
2317 retaddr = gen_rtx_REG (Pmode, A0_REG);
2318 else
/* Saved a0 lives 4 words below the frame pointer in the register-
   window save area.  */
2320 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2321 addr = memory_address (Pmode, addr);
2322 retaddr = gen_reg_rtx (Pmode);
2323 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2326 /* The 2 most-significant bits of the return address on Xtensa hold
2327 the register window size. To get the real return address, these
2328 bits must be replaced with the high bits from the current PC. */
2330 result = gen_reg_rtx (Pmode);
2331 emit_insn (gen_fix_return_addr (result, retaddr));
2332 return result;
2336 /* Create the va_list data type.
2337 This structure is set up by __builtin_saveregs. The __va_reg
2338 field points to a stack-allocated region holding the contents of the
2339 incoming argument registers. The __va_ndx field is an index initialized
2340 to the position of the first unnamed (variable) argument. This same index
2341 is also used to address the arguments passed in memory. Thus, the
2342 __va_stk field is initialized to point to the position of the first
2343 argument in memory offset to account for the arguments passed in
2344 registers. E.G., if there are 6 argument registers, and each register is
2345 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2346 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2347 argument word N for N >= 6. */
2349 tree
2350 xtensa_build_va_list ()
2352 tree f_stk, f_reg, f_ndx, record, type_decl;
2354 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2355 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2357 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2358 ptr_type_node);
2359 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2360 ptr_type_node);
2361 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2362 integer_type_node);
2364 DECL_FIELD_CONTEXT (f_stk) = record;
2365 DECL_FIELD_CONTEXT (f_reg) = record;
2366 DECL_FIELD_CONTEXT (f_ndx) = record;
/* Chain the fields in declaration order: __va_stk, __va_reg, __va_ndx
   (xtensa_va_start/xtensa_va_arg rely on this order).  */
2368 TREE_CHAIN (record) = type_decl;
2369 TYPE_NAME (record) = type_decl;
2370 TYPE_FIELDS (record) = f_stk;
2371 TREE_CHAIN (f_stk) = f_reg;
2372 TREE_CHAIN (f_reg) = f_ndx;
2374 layout_type (record);
2375 return record;
2379 /* Save the incoming argument registers on the stack. Returns the
2380 address of the saved registers. */
/* NOTE(review): the return-type line ("rtx") appears to have been lost
   in extraction.  */
2383 xtensa_builtin_saveregs ()
2385 rtx gp_regs, dest;
2386 int arg_words = current_function_arg_words;
2387 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2388 int i;
2390 if (gp_left == 0)
2391 return const0_rtx;
2393 /* allocate the general-purpose register space */
2394 gp_regs = assign_stack_local
2395 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2396 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2398 /* Now store the incoming registers. */
2399 dest = change_address (gp_regs, SImode,
2400 plus_constant (XEXP (gp_regs, 0),
2401 arg_words * UNITS_PER_WORD));
2403 /* Note: Don't use move_block_from_reg() here because the incoming
2404 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2405 Instead, call gen_raw_REG() directly so that we get a distinct
2406 instance of (REG:SI 7). */
2407 for (i = 0; i < gp_left; i++)
2409 emit_move_insn (operand_subword (dest, i, 1, BLKmode),
2410 gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
2413 return XEXP (gp_regs, 0);
2417 /* Implement `va_start' for varargs and stdarg. We look at the
2418 current function to fill in an initial va_list. */
2420 void
2421 xtensa_va_start (valist, nextarg)
2422 tree valist;
2423 rtx nextarg ATTRIBUTE_UNUSED;
2425 tree f_stk, stk;
2426 tree f_reg, reg;
2427 tree f_ndx, ndx;
2428 tree t, u;
2429 int arg_words;
2431 arg_words = current_function_args_info.arg_words;
/* Fields in declaration order: __va_stk, __va_reg, __va_ndx (see
   xtensa_build_va_list).  */
2433 f_stk = TYPE_FIELDS (va_list_type_node);
2434 f_reg = TREE_CHAIN (f_stk);
2435 f_ndx = TREE_CHAIN (f_reg);
2437 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2438 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2439 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2441 /* Call __builtin_saveregs; save the result in __va_reg */
2442 current_function_arg_words = arg_words;
2443 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2444 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2445 TREE_SIDE_EFFECTS (t) = 1;
2446 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2448 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2449 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2450 u = fold (build (PLUS_EXPR, ptr_type_node, u,
2451 build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
2452 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2453 TREE_SIDE_EFFECTS (t) = 1;
2454 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2456 /* Set the __va_ndx member. */
2457 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2458 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2459 TREE_SIDE_EFFECTS (t) = 1;
2460 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2464 /* Implement `va_arg'. */
/* NOTE(review): this listing appears to have lost blank and brace-only
   lines (and the return-type line, presumably `rtx') during extraction.
   Only comments are added below; every code line is left untouched.

   Expand __builtin_va_arg for Xtensa's three-field va_list
   (__va_stk, __va_reg, __va_ndx): emit RTL computing the address of
   the next argument of TYPE and advance __va_ndx past it.  Returns
   the address as an rtx.  */
2467 xtensa_va_arg (valist, type)
2468      tree valist, type;
2470   tree f_stk, stk;
2471   tree f_reg, reg;
2472   tree f_ndx, ndx;
2473   tree tmp, addr_tree, type_size;
2474   rtx array, orig_ndx, r, addr, size, va_size;
2475   rtx lab_false, lab_over, lab_false2;
     /* Pick apart the three FIELD_DECLs of the va_list record and build
	COMPONENT_REFs for each member of VALIST.  */
2477   f_stk = TYPE_FIELDS (va_list_type_node);
2478   f_reg = TREE_CHAIN (f_stk);
2479   f_ndx = TREE_CHAIN (f_reg);
2481   stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2482   reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2483   ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2485   type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
     /* va_size = sizeof (TYPE) rounded up to a multiple of UNITS_PER_WORD,
	in bytes: ((size + UPW-1) / UPW) * UPW.  */
2487   va_size = gen_reg_rtx (SImode);
2488   tmp = fold (build (MULT_EXPR, sizetype,
2489 		     fold (build (TRUNC_DIV_EXPR, sizetype,
2490 				  fold (build (PLUS_EXPR, sizetype,
2491 					       type_size,
2492 					       size_int (UNITS_PER_WORD - 1))),
2493 				  size_int (UNITS_PER_WORD))),
2494 		     size_int (UNITS_PER_WORD)));
2495   r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
2496   if (r != va_size)
2497     emit_move_insn (va_size, r);
2500   /* First align __va_ndx to a double word boundary if necessary for this arg:
2502        if (__alignof__ (TYPE) > 4)
2503          (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
2506   if (TYPE_ALIGN (type) > BITS_PER_WORD)
2508       tmp = build (PLUS_EXPR, integer_type_node, ndx,
2509 		   build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
2510       tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2511 		   build_int_2 (-2 * UNITS_PER_WORD, -1));
2512       tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2513       TREE_SIDE_EFFECTS (tmp) = 1;
2514       expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2518   /* Increment __va_ndx to point past the argument:
2520        orig_ndx = (AP).__va_ndx;
2521        (AP).__va_ndx += __va_size (TYPE);
     /* ORIG_NDX keeps the pre-increment index; it decides below whether
	the argument would have started in the register-save area.  */
2524   orig_ndx = gen_reg_rtx (SImode);
2525   r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2526   if (r != orig_ndx)
2527     emit_move_insn (orig_ndx, r);
2529   tmp = build (PLUS_EXPR, integer_type_node, ndx,
2530 	       make_tree (intSI_type_node, va_size));
2531   tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2532   TREE_SIDE_EFFECTS (tmp) = 1;
2533   expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2536   /* Check if the argument is in registers:
2538        if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2539            && !MUST_PASS_IN_STACK (type))
2540           __array = (AP).__va_reg;
2543   array = gen_reg_rtx (Pmode);
2545   lab_over = NULL_RTX;
2546   if (!MUST_PASS_IN_STACK (VOIDmode, type))
2548       lab_false = gen_label_rtx ();
2549       lab_over = gen_label_rtx ();
     /* Branch to LAB_FALSE when the (already-incremented) index exceeds
	the register-save area, i.e. the arg is on the stack.  */
2551       emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
2552 					    EXPAND_NORMAL),
2553 			       GEN_INT (MAX_ARGS_IN_REGISTERS
2554 					* UNITS_PER_WORD),
2555 			       GT, const1_rtx, SImode, 0, lab_false);
2557       r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2558       if (r != array)
2559 	emit_move_insn (array, r);
2561       emit_jump_insn (gen_jump (lab_over));
2562       emit_barrier ();
2563       emit_label (lab_false);
2566   /* ...otherwise, the argument is on the stack (never split between
2567      registers and the stack -- change __va_ndx if necessary):
2569        else
2571 	 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2572 	     (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2573 	 __array = (AP).__va_stk;
2577   lab_false2 = gen_label_rtx ();
2578   emit_cmp_and_jump_insns (orig_ndx,
2579 			   GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2580 			   GE, const1_rtx, SImode, 0, lab_false2);
2582   tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
2583 	       build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
2584   tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2585   TREE_SIDE_EFFECTS (tmp) = 1;
2586   expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2588   emit_label (lab_false2);
2590   r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2591   if (r != array)
2592     emit_move_insn (array, r);
2594   if (lab_over != NULL_RTX)
2595     emit_label (lab_over);
2598   /* Given the base array pointer (__array) and index to the subsequent
2599      argument (__va_ndx), find the address:
2601        __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2602 				  ? sizeof (TYPE)
2603 				  : __va_size (TYPE))
2605      The results are endian-dependent because values smaller than one word
2606      are aligned differently.
2609   size = gen_reg_rtx (SImode);
2610   emit_move_insn (size, va_size);
2612   if (BYTES_BIG_ENDIAN)
2614       rtx lab_use_va_size = gen_label_rtx ();
     /* Big-endian: sub-word values sit at the high end of their slot, so
	subtract the actual type size rather than the rounded size.  */
2616       emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
2617 					    EXPAND_NORMAL),
2618 			       GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
2619 			       GE, const1_rtx, SImode, 0, lab_use_va_size);
2621       r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
2622       if (r != size)
2623 	emit_move_insn (size, r);
2625       emit_label (lab_use_va_size);
     /* addr = array + __va_ndx - size; copy to a fresh reg so the caller
	gets a stable address.  */
2628   addr_tree = build (PLUS_EXPR, ptr_type_node,
2629 		     make_tree (ptr_type_node, array),
2630 		     ndx);
2631   addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
2632 		     make_tree (intSI_type_node, size));
2633   addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2634   addr = copy_to_reg (addr);
2635   return addr;
/* Implement PREFERRED_RELOAD_CLASS: given an rtx X being reloaded into
   or out of CLASS (ISOUTPUT nonzero for an output reload), return the
   register class reload should actually use.  Constant doubles cannot
   be loaded directly, and AR_REGS/GR_REGS are narrowed to RL_REGS to
   keep SP and the hard frame pointer out of reload's hands.
   NOTE(review): brace-only lines appear to have been dropped from this
   listing during extraction; code lines are untouched.  */
2639 enum reg_class
2640 xtensa_preferred_reload_class (x, class, isoutput)
2641      rtx x;
2642      enum reg_class class;
2643      int isoutput;
     /* An input reload of a CONST_DOUBLE can't go in a register at all;
	it must come from the constant pool.  */
2645   if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2646     return NO_REGS;
2648   /* Don't use the stack pointer or hard frame pointer for reloads!
2649      The hard frame pointer would normally be OK except that it may
2650      briefly hold an incoming argument in the prologue, and reload
2651      won't know that it is live because the hard frame pointer is
2652      treated specially. */
2654   if (class == AR_REGS || class == GR_REGS)
2655     return RL_REGS;
2657   return class;
/* Implement SECONDARY_{INPUT,OUTPUT}_RELOAD_CLASS: return the class of
   an intermediate register needed to move X into/out of CLASS in MODE,
   or NO_REGS if no scratch is required.  The MAC16 accumulator and the
   FP registers cannot be loaded/stored directly in all cases and need
   an RL_REGS intermediary.  NOTE(review): brace-only lines appear to
   have been dropped from this listing; code lines are untouched.  */
2661 enum reg_class
2662 xtensa_secondary_reload_class (class, mode, x, isoutput)
2663      enum reg_class class;
2664      enum machine_mode mode ATTRIBUTE_UNUSED;
2665      rtx x;
2666      int isoutput;
2668   int regno;
     /* Look through a sign extension to the underlying register.  */
2670   if (GET_CODE (x) == SIGN_EXTEND)
2671     x = XEXP (x, 0);
2672   regno = xt_true_regnum (x);
2674   if (!isoutput)
     /* FP regs can't be loaded straight from the constant pool.  */
2676       if (class == FP_REGS && constantpool_mem_p (x))
2677 	return RL_REGS;
     /* Moves to/from the MAC16 accumulator must go through an AR reg.  */
2680   if (ACC_REG_P (regno))
2681     return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
2682   if (class == ACC_REG)
2683     return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
2685   return NO_REGS;
/* Implement ORDER_REGS_FOR_LOCAL_ALLOC: fill reg_alloc_order[] with the
   preferred allocation order.  Non-leaf functions use a precomputed
   table (reg_nonleaf_alloc_order); leaf functions prefer the caller-
   clobbered AR registers above the incoming-argument registers, trying
   the argument registers only as a last resort so incoming values are
   not clobbered needlessly.  NOTE(review): brace-only lines appear to
   have been dropped from this listing; code lines are untouched.  */
2689 void
2690 order_regs_for_local_alloc ()
2692   if (!leaf_function_p ())
2694       memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2695 	      FIRST_PSEUDO_REGISTER * sizeof (int));
2697   else
2699       int i, num_arg_regs;
2700       int nxt = 0;
2702       /* use the AR registers in increasing order (skipping a0 and a1)
2703 	 but save the incoming argument registers for a last resort */
2704       num_arg_regs = current_function_args_info.arg_words;
2705       if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2706 	num_arg_regs = MAX_ARGS_IN_REGISTERS;
2707       for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2708 	reg_alloc_order[nxt++] = i + num_arg_regs;
2709       for (i = 0; i < num_arg_regs; i++)
2710 	reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2712       /* list the coprocessor registers in order */
2713       for (i = 0; i < BR_REG_NUM; i++)
2714 	reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2716       /* list the FP registers in order for now */
2717       for (i = 0; i < 16; i++)
2718 	reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2720       /* GCC requires that we list *all* the registers.... */
2721       reg_alloc_order[nxt++] = 0;	/* a0 = return address */
2722       reg_alloc_order[nxt++] = 1;	/* a1 = stack pointer */
2723       reg_alloc_order[nxt++] = 16;	/* pseudo frame pointer */
2724       reg_alloc_order[nxt++] = 17;	/* pseudo arg pointer */
2726       reg_alloc_order[nxt++] = ACC_REG_FIRST;	/* MAC16 accumulator */
2731 /* A customized version of reg_overlap_mentioned_p that only looks for
2732    references to a7 (as opposed to hard_frame_pointer_rtx). */
/* Recursively walk rtx X and return nonzero iff some hard register
   reference in it overlaps a7.  References made through
   hard_frame_pointer_rtx itself are deliberately excluded.
   NOTE(review): the return-type line and brace-only lines appear to
   have been dropped from this listing; code lines are untouched.  */
2735 a7_overlap_mentioned_p (x)
2736      rtx x;
2738   int i, j;
2739   unsigned int x_regno;
2740   const char *fmt;
     /* A bare hard register: overlap iff its regno range covers A7_REG.  */
2742   if (GET_CODE (x) == REG)
2744       x_regno = REGNO (x);
2745       return (x != hard_frame_pointer_rtx
2746 	      && x_regno < A7_REG + 1
2747 	      && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
     /* A subreg of a hard register: same test on the true regno.  */
2750   if (GET_CODE (x) == SUBREG
2751       && GET_CODE (SUBREG_REG (x)) == REG
2752       && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2754       x_regno = subreg_regno (x);
2755       return (SUBREG_REG (x) != hard_frame_pointer_rtx
2756 	      && x_regno < A7_REG + 1
2757 	      && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2760   /* X does not match, so try its subexpressions. */
2761   fmt = GET_RTX_FORMAT (GET_CODE (x));
2762   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2764       if (fmt[i] == 'e')
2766 	  if (a7_overlap_mentioned_p (XEXP (x, i)))
2767 	    return 1;
2769       else if (fmt[i] == 'E')
2771 	  for (j = XVECLEN (x, i) - 1; j >=0; j--)
2772 	    if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2773 	      return 1;
2777   return 0;
2781 /* Some Xtensa targets support multiple bss sections.  If the section
2782    name ends with ".bss", add SECTION_BSS to the flags. */
/* TARGET_SECTION_TYPE_FLAGS hook: compute section flags for DECL placed
   in section NAME (RELOC as for default_section_type_flags).  Only
   uninitialized variables may go in a ".bss"-suffixed section; anything
   else draws a warning and keeps the default flags.
   NOTE(review): brace-only lines appear to have been dropped from this
   listing; code lines are untouched.  */
2784 static unsigned int
2785 xtensa_multibss_section_type_flags (decl, name, reloc)
2786      tree decl;
2787      const char *name;
2788      int reloc;
2790   unsigned int flags = default_section_type_flags (decl, name, reloc);
2791   const char *suffix;
     /* Does the section name end in ".bss"?  */
2793   suffix = strrchr (name, '.');
2794   if (suffix && strcmp (suffix, ".bss") == 0)
2796       if (!decl || (TREE_CODE (decl) == VAR_DECL
2797 		    && DECL_INITIAL (decl) == NULL_TREE))
2798 	flags |= SECTION_BSS;  /* @nobits */
2799       else
2800 	warning ("only uninitialized variables can be placed in a "
2801 		 ".bss section");
2804   return flags;
2808 /* The literal pool stays with the function. */
/* TARGET_ASM_SELECT_RTX_SECTION hook: constant-pool entries are emitted
   into the current function's own section (Xtensa literals must be
   reachable by L32R), so just switch to it; MODE/X/ALIGN are unused.
   NOTE(review): brace-only lines appear to have been dropped from this
   listing; code lines are untouched.  */
2810 static void
2811 xtensa_select_rtx_section (mode, x, align)
2812      enum machine_mode mode ATTRIBUTE_UNUSED;
2813      rtx x ATTRIBUTE_UNUSED;
2814      unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
2816   function_section (current_function_decl);
2819 /* Compute a (partial) cost for rtx X.  Return true if the complete
2820    cost has been computed, and false if subexpressions should be
2821    scanned.  In either case, *TOTAL contains the cost result. */
/* TARGET_RTX_COSTS hook.  CODE is GET_CODE (X); OUTER_CODE is the code
   of the containing expression (used to decide whether a CONST_INT fits
   an operand immediate field and is therefore free or cheap).  Costs of
   50 model an out-of-line libcall when the corresponding hardware
   option (MUL32, DIV32, NSA, hard float, ...) is absent.
   NOTE(review): brace-only lines appear to have been dropped from this
   listing during extraction; code lines are untouched.  */
2823 static bool
2824 xtensa_rtx_costs (x, code, outer_code, total)
2825      rtx x;
2826      int code, outer_code;
2827      int *total;
2829   switch (code)
2831     case CONST_INT:
     /* First see whether the constant fits the immediate field of the
	enclosing operation; if so it costs little or nothing.  */
2832       switch (outer_code)
2834 	case SET:
2835 	  if (xtensa_simm12b (INTVAL (x)))
2837 	      *total = 4;
2838 	      return true;
2840 	  break;
2841 	case PLUS:
2842 	  if (xtensa_simm8 (INTVAL (x))
2843 	      || xtensa_simm8x256 (INTVAL (x)))
2845 	      *total = 0;
2846 	      return true;
2848 	  break;
2849 	case AND:
2850 	  if (xtensa_mask_immediate (INTVAL (x)))
2852 	      *total = 0;
2853 	      return true;
2855 	  break;
2856 	case COMPARE:
2857 	  if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
2859 	      *total = 0;
2860 	      return true;
2862 	  break;
2863 	case ASHIFT:
2864 	case ASHIFTRT:
2865 	case LSHIFTRT:
2866 	case ROTATE:
2867 	case ROTATERT:
2868 	  /* no way to tell if X is the 2nd operand so be conservative */
2869 	default: break;
     /* Constant does not fit an immediate field: load via MOVI (simm12b),
	CONST16 pair, or the constant pool.  */
2871       if (xtensa_simm12b (INTVAL (x)))
2872 	*total = 5;
2873       else if (TARGET_CONST16)
2874 	*total = COSTS_N_INSNS (2);
2875       else
2876 	*total = 6;
2877       return true;
2879     case CONST:
2880     case LABEL_REF:
2881     case SYMBOL_REF:
2882       if (TARGET_CONST16)
2883 	*total = COSTS_N_INSNS (2);
2884       else
2885 	*total = 5;
2886       return true;
2888     case CONST_DOUBLE:
2889       if (TARGET_CONST16)
2890 	*total = COSTS_N_INSNS (4);
2891       else
2892 	*total = 7;
2893       return true;
2895     case MEM:
     /* One load/store per word; doubled when the address itself needs
	to be reloaded into a register first.  */
2897 	int num_words =
2898 	  (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ?  2 : 1;
2900 	if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
2901 	  *total = COSTS_N_INSNS (num_words);
2902 	else
2903 	  *total = COSTS_N_INSNS (2*num_words);
2904 	return true;
2907     case FFS:
2908       *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
2909       return true;
2911     case NOT:
2912       *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
2913       return true;
2915     case AND:
2916     case IOR:
2917     case XOR:
2918       if (GET_MODE (x) == DImode)
2919 	*total = COSTS_N_INSNS (2);
2920       else
2921 	*total = COSTS_N_INSNS (1);
2922       return true;
2924     case ASHIFT:
2925     case ASHIFTRT:
2926     case LSHIFTRT:
2927       if (GET_MODE (x) == DImode)
2928 	*total = COSTS_N_INSNS (50);
2929       else
2930 	*total = COSTS_N_INSNS (1);
2931       return true;
2933     case ABS:
2935 	enum machine_mode xmode = GET_MODE (x);
2936 	if (xmode == SFmode)
2937 	  *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2938 	else if (xmode == DFmode)
2939 	  *total = COSTS_N_INSNS (50);
2940 	else
2941 	  *total = COSTS_N_INSNS (4);
2942 	return true;
2945     case PLUS:
2946     case MINUS:
2948 	enum machine_mode xmode = GET_MODE (x);
2949 	if (xmode == SFmode)
2950 	  *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2951 	else if (xmode == DFmode || xmode == DImode)
2952 	  *total = COSTS_N_INSNS (50);
2953 	else
2954 	  *total = COSTS_N_INSNS (1);
2955 	return true;
2958     case NEG:
2959       *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
2960       return true;
2962     case MULT:
     /* Cost tracks which multiply option is configured; MAC16/MUL16
	need extra insns for a full 32x32 product.  */
2964 	enum machine_mode xmode = GET_MODE (x);
2965 	if (xmode == SFmode)
2966 	  *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
2967 	else if (xmode == DFmode || xmode == DImode)
2968 	  *total = COSTS_N_INSNS (50);
2969 	else if (TARGET_MUL32)
2970 	  *total = COSTS_N_INSNS (4);
2971 	else if (TARGET_MAC16)
2972 	  *total = COSTS_N_INSNS (16);
2973 	else if (TARGET_MUL16)
2974 	  *total = COSTS_N_INSNS (12);
2975 	else
2976 	  *total = COSTS_N_INSNS (50);
2977 	return true;
2980     case DIV:
2981     case MOD:
2983 	enum machine_mode xmode = GET_MODE (x);
2984 	if (xmode == SFmode)
2986 	    *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
2987 	    return true;
2989 	else if (xmode == DFmode)
2991 	    *total = COSTS_N_INSNS (50);
2992 	    return true;
2995       /* fall through */
2997     case UDIV:
2998     case UMOD:
3000 	enum machine_mode xmode = GET_MODE (x);
3001 	if (xmode == DImode)
3002 	  *total = COSTS_N_INSNS (50);
3003 	else if (TARGET_DIV32)
3004 	  *total = COSTS_N_INSNS (32);
3005 	else
3006 	  *total = COSTS_N_INSNS (50);
3007 	return true;
3010     case SQRT:
3011       if (GET_MODE (x) == SFmode)
3012 	*total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
3013       else
3014 	*total = COSTS_N_INSNS (50);
3015       return true;
3017     case SMIN:
3018     case UMIN:
3019     case SMAX:
3020     case UMAX:
3021       *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
3022       return true;
3024     case SIGN_EXTRACT:
3025     case SIGN_EXTEND:
3026       *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
3027       return true;
3029     case ZERO_EXTRACT:
3030     case ZERO_EXTEND:
3031       *total = COSTS_N_INSNS (1);
3032       return true;
3034     default:
     /* Unknown code: let the caller recurse into subexpressions.  */
3035       return false;
3039 #include "gt-xtensa.h"