* config/xtensa/xtensa.c (override_options): Don't warn about
[official-gcc.git] / gcc / config / xtensa / xtensa.c
blobdd9ad7d8005dad26df046e07d39656f376b9fa47
1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright (C) 2001 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "regs.h"
26 #include "machmode.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
35 #include "recog.h"
36 #include "output.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "flags.h"
40 #include "reload.h"
41 #include "tm_p.h"
42 #include "function.h"
43 #include "toplev.h"
44 #include "optabs.h"
45 #include "output.h"
46 #include "libfuncs.h"
47 #include "ggc.h"
48 #include "target.h"
49 #include "target-def.h"
50 #include "langhooks.h"
/* Enumeration for all of the relational tests, so that we can build
   arrays indexed by the test type, and not worry about the order
   of EQ, NE, etc.  */

enum internal_test {
  ITEST_EQ,
  ITEST_NE,
  ITEST_GT,
  ITEST_GE,
  ITEST_LT,
  ITEST_LE,
  ITEST_GTU,
  ITEST_GEU,
  ITEST_LTU,
  ITEST_LEU,
  ITEST_MAX   /* number of tests; also used as a "no test" sentinel */
};
70 /* Cached operands, and operator to compare for use in set/branch on
71 condition codes. */
72 rtx branch_cmp[2];
74 /* what type of branch to use */
75 enum cmp_type branch_type;
77 /* Array giving truth value on whether or not a given hard register
78 can support a given mode. */
79 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
81 /* Current frame size calculated by compute_frame_size. */
82 unsigned xtensa_current_frame_size;
84 /* Tables of ld/st opcode names for block moves */
85 const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
86 const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
87 #define LARGEST_MOVE_RATIO 15
89 /* Define the structure for the machine field in struct function. */
90 struct machine_function GTY(())
92 int accesses_prev_frame;
95 /* Vector, indexed by hard register number, which contains 1 for a
96 register that is allowable in a candidate for leaf function
97 treatment. */
99 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
101 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
102 1, 1, 1,
103 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
107 /* Map hard register number to register class */
108 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
110 GR_REGS, SP_REG, GR_REGS, GR_REGS,
111 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
112 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
113 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
114 AR_REGS, AR_REGS, BR_REGS,
115 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
116 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
117 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
118 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
119 ACC_REG,
122 /* Map register constraint character to register class. */
123 enum reg_class xtensa_char_to_class[256] =
125 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
126 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
191 static int b4const_or_zero PARAMS ((int));
192 static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
193 static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
194 static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
195 static rtx gen_conditional_move PARAMS ((rtx));
196 static rtx fixup_subreg_mem PARAMS ((rtx x));
197 static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
198 static struct machine_function * xtensa_init_machine_status PARAMS ((void));
199 static void printx PARAMS ((FILE *, signed int));
200 static void xtensa_select_rtx_section PARAMS ((enum machine_mode, rtx,
201 unsigned HOST_WIDE_INT));
202 static void xtensa_encode_section_info PARAMS ((tree, int));
204 static rtx frame_size_const;
205 static int current_function_arg_words;
206 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
207 REG_ALLOC_ORDER;
209 /* This macro generates the assembly code for function entry.
210 FILE is a stdio stream to output the code to.
211 SIZE is an int: how many units of temporary storage to allocate.
212 Refer to the array 'regs_ever_live' to determine which registers
213 to save; 'regs_ever_live[I]' is nonzero if register number I
214 is ever used in the function. This macro is responsible for
215 knowing which registers should not be saved even if used. */
217 #undef TARGET_ASM_FUNCTION_PROLOGUE
218 #define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
220 /* This macro generates the assembly code for function exit,
221 on machines that need it. If FUNCTION_EPILOGUE is not defined
222 then individual return instructions are generated for each
223 return statement. Args are same as for FUNCTION_PROLOGUE. */
225 #undef TARGET_ASM_FUNCTION_EPILOGUE
226 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
228 /* These hooks specify assembly directives for creating certain kinds
229 of integer object. */
231 #undef TARGET_ASM_ALIGNED_SI_OP
232 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
234 #undef TARGET_ASM_SELECT_RTX_SECTION
235 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
236 #undef TARGET_ENCODE_SECTION_INFO
237 #define TARGET_ENCODE_SECTION_INFO xtensa_encode_section_info
239 struct gcc_target targetm = TARGET_INITIALIZER;
/*
 * Functions to test Xtensa immediate operand validity.
 */

/* Return nonzero if V is one of the unsigned values encodable in the
   b4constu immediate field (used by unsigned branch instructions).  */

int
xtensa_b4constu (int v)
{
  switch (v)
    {
    case 32768:
    case 65536:
    case 2:
    case 3:
    case 4:
    case 5:
    case 6:
    case 7:
    case 8:
    case 10:
    case 12:
    case 16:
    case 32:
    case 64:
    case 128:
    case 256:
      return 1;
    }
  return 0;
}
/* Return nonzero if V is a multiple of 256 representable as a signed
   8-bit immediate scaled by 256 (range -32768 .. 32512).  */

int
xtensa_simm8x256 (int v)
{
  return (v & 255) == 0 && (v >= -32768 && v <= 32512);
}
/* Return nonzero if V fits in the ai4const immediate field
   (-1 or 1..15, used by the addi.n instruction).  */

int
xtensa_ai4const (int v)
{
  return (v == -1 || (v >= 1 && v <= 15));
}
/* Return nonzero if V fits in the movi.n signed 7-bit immediate
   range (-32 .. 95).  */

int
xtensa_simm7 (int v)
{
  return v >= -32 && v <= 95;
}
/* Return nonzero if V is one of the signed values encodable in the
   b4const immediate field (used by signed branch instructions).  */

int
xtensa_b4const (int v)
{
  switch (v)
    {
    case -1:
    case 1:
    case 2:
    case 3:
    case 4:
    case 5:
    case 6:
    case 7:
    case 8:
    case 10:
    case 12:
    case 16:
    case 32:
    case 64:
    case 128:
    case 256:
      return 1;
    }
  return 0;
}
/* Return nonzero if V fits in a signed 8-bit immediate (-128 .. 127).  */

int
xtensa_simm8 (int v)
{
  return v >= -128 && v <= 127;
}
/* Return nonzero if V is in the range 7..22 (valid for the tp7
   immediate field used by the SEXT instruction).  */

int
xtensa_tp7 (int v)
{
  return (v >= 7 && v <= 22);
}
/* Return nonzero if V is a multiple of 4 in the range 0..60
   (the scaled offset field of narrow load/store instructions).  */

int
xtensa_lsi4x4 (int v)
{
  return (v & 3) == 0 && (v >= 0 && v <= 60);
}
/* Return nonzero if V fits in the signed 12-bit immediate field of
   the MOVI instruction (-2048 .. 2047).  */

int
xtensa_simm12b (int v)
{
  return v >= -2048 && v <= 2047;
}
/* Return nonzero if V fits in an unsigned 8-bit immediate (0 .. 255).  */

int
xtensa_uimm8 (int v)
{
  return v >= 0 && v <= 255;
}
/* Return nonzero if V is an even value in 0..510 (unsigned 8-bit
   immediate scaled by 2, used for 16-bit load/store offsets).  */

int
xtensa_uimm8x2 (int v)
{
  return (v & 1) == 0 && (v >= 0 && v <= 510);
}
/* Return nonzero if V is a multiple of 4 in 0..1020 (unsigned 8-bit
   immediate scaled by 4, used for 32-bit load/store offsets).  */

int
xtensa_uimm8x4 (int v)
{
  return (v & 3) == 0 && (v >= 0 && v <= 1020);
}
371 /* This is just like the standard true_regnum() function except that it
372 works even when reg_renumber is not initialized. */
375 xt_true_regnum (x)
376 rtx x;
378 if (GET_CODE (x) == REG)
380 if (reg_renumber
381 && REGNO (x) >= FIRST_PSEUDO_REGISTER
382 && reg_renumber[REGNO (x)] >= 0)
383 return reg_renumber[REGNO (x)];
384 return REGNO (x);
386 if (GET_CODE (x) == SUBREG)
388 int base = xt_true_regnum (SUBREG_REG (x));
389 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
390 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
391 GET_MODE (SUBREG_REG (x)),
392 SUBREG_BYTE (x), GET_MODE (x));
394 return -1;
399 add_operand (op, mode)
400 rtx op;
401 enum machine_mode mode;
403 if (GET_CODE (op) == CONST_INT)
404 return (xtensa_simm8 (INTVAL (op)) ||
405 xtensa_simm8x256 (INTVAL (op)));
407 return register_operand (op, mode);
412 arith_operand (op, mode)
413 rtx op;
414 enum machine_mode mode;
416 if (GET_CODE (op) == CONST_INT)
417 return xtensa_simm8 (INTVAL (op));
419 return register_operand (op, mode);
424 nonimmed_operand (op, mode)
425 rtx op;
426 enum machine_mode mode;
428 /* We cannot use the standard nonimmediate_operand() predicate because
429 it includes constant pool memory operands. */
431 if (memory_operand (op, mode))
432 return !constantpool_address_p (XEXP (op, 0));
434 return register_operand (op, mode);
439 mem_operand (op, mode)
440 rtx op;
441 enum machine_mode mode;
443 /* We cannot use the standard memory_operand() predicate because
444 it includes constant pool memory operands. */
446 if (memory_operand (op, mode))
447 return !constantpool_address_p (XEXP (op, 0));
449 return FALSE;
454 xtensa_valid_move (mode, operands)
455 enum machine_mode mode;
456 rtx *operands;
458 /* Either the destination or source must be a register, and the
459 MAC16 accumulator doesn't count. */
461 if (register_operand (operands[0], mode))
463 int dst_regnum = xt_true_regnum (operands[0]);
465 /* The stack pointer can only be assigned with a MOVSP opcode. */
466 if (dst_regnum == STACK_POINTER_REGNUM)
467 return (mode == SImode
468 && register_operand (operands[1], mode)
469 && !ACC_REG_P (xt_true_regnum (operands[1])));
471 if (!ACC_REG_P (dst_regnum))
472 return true;
474 if (register_operand (operands[1], mode))
476 int src_regnum = xt_true_regnum (operands[1]);
477 if (!ACC_REG_P (src_regnum))
478 return true;
480 return FALSE;
485 mask_operand (op, mode)
486 rtx op;
487 enum machine_mode mode;
489 if (GET_CODE (op) == CONST_INT)
490 return xtensa_mask_immediate (INTVAL (op));
492 return register_operand (op, mode);
497 extui_fldsz_operand (op, mode)
498 rtx op;
499 enum machine_mode mode ATTRIBUTE_UNUSED;
501 return ((GET_CODE (op) == CONST_INT)
502 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
507 sext_operand (op, mode)
508 rtx op;
509 enum machine_mode mode;
511 if (TARGET_SEXT)
512 return nonimmed_operand (op, mode);
513 return mem_operand (op, mode);
518 sext_fldsz_operand (op, mode)
519 rtx op;
520 enum machine_mode mode ATTRIBUTE_UNUSED;
522 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
527 lsbitnum_operand (op, mode)
528 rtx op;
529 enum machine_mode mode ATTRIBUTE_UNUSED;
531 if (GET_CODE (op) == CONST_INT)
533 return (BITS_BIG_ENDIAN
534 ? (INTVAL (op) == BITS_PER_WORD-1)
535 : (INTVAL (op) == 0));
537 return FALSE;
541 static int
542 b4const_or_zero (v)
543 int v;
545 if (v == 0)
546 return TRUE;
547 return xtensa_b4const (v);
552 branch_operand (op, mode)
553 rtx op;
554 enum machine_mode mode;
556 if (GET_CODE (op) == CONST_INT)
557 return b4const_or_zero (INTVAL (op));
559 return register_operand (op, mode);
564 ubranch_operand (op, mode)
565 rtx op;
566 enum machine_mode mode;
568 if (GET_CODE (op) == CONST_INT)
569 return xtensa_b4constu (INTVAL (op));
571 return register_operand (op, mode);
576 call_insn_operand (op, mode)
577 rtx op;
578 enum machine_mode mode ATTRIBUTE_UNUSED;
580 if ((GET_CODE (op) == REG)
581 && (op != arg_pointer_rtx)
582 && ((REGNO (op) < FRAME_POINTER_REGNUM)
583 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
584 return TRUE;
586 if (CONSTANT_ADDRESS_P (op))
588 /* Direct calls only allowed to static functions with PIC. */
589 return (!flag_pic || (GET_CODE (op) == SYMBOL_REF
590 && SYMBOL_REF_FLAG (op)));
593 return FALSE;
598 move_operand (op, mode)
599 rtx op;
600 enum machine_mode mode;
602 if (register_operand (op, mode))
603 return TRUE;
605 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
606 result in 0/1. */
607 if (GET_CODE (op) == CONSTANT_P_RTX)
608 return TRUE;
610 if (GET_CODE (op) == CONST_INT)
611 return xtensa_simm12b (INTVAL (op));
613 if (GET_CODE (op) == MEM)
614 return memory_address_p (mode, XEXP (op, 0));
616 return FALSE;
621 smalloffset_mem_p (op)
622 rtx op;
624 if (GET_CODE (op) == MEM)
626 rtx addr = XEXP (op, 0);
627 if (GET_CODE (addr) == REG)
628 return REG_OK_FOR_BASE_P (addr);
629 if (GET_CODE (addr) == PLUS)
631 rtx offset = XEXP (addr, 0);
632 if (GET_CODE (offset) != CONST_INT)
633 offset = XEXP (addr, 1);
634 if (GET_CODE (offset) != CONST_INT)
635 return FALSE;
636 return xtensa_lsi4x4 (INTVAL (offset));
639 return FALSE;
644 smalloffset_double_mem_p (op)
645 rtx op;
647 if (!smalloffset_mem_p (op))
648 return FALSE;
649 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
654 constantpool_address_p (addr)
655 rtx addr;
657 rtx sym = addr;
659 if (GET_CODE (addr) == CONST)
661 rtx offset;
663 /* only handle (PLUS (SYM, OFFSET)) form */
664 addr = XEXP (addr, 0);
665 if (GET_CODE (addr) != PLUS)
666 return FALSE;
668 /* make sure the address is word aligned */
669 offset = XEXP (addr, 1);
670 if ((GET_CODE (offset) != CONST_INT)
671 || ((INTVAL (offset) & 3) != 0))
672 return FALSE;
674 sym = XEXP (addr, 0);
677 if ((GET_CODE (sym) == SYMBOL_REF)
678 && CONSTANT_POOL_ADDRESS_P (sym))
679 return TRUE;
680 return FALSE;
685 constantpool_mem_p (op)
686 rtx op;
688 if (GET_CODE (op) == MEM)
689 return constantpool_address_p (XEXP (op, 0));
690 return FALSE;
695 non_const_move_operand (op, mode)
696 rtx op;
697 enum machine_mode mode;
699 if (register_operand (op, mode))
700 return 1;
701 if (GET_CODE (op) == SUBREG)
702 op = SUBREG_REG (op);
703 if (GET_CODE (op) == MEM)
704 return memory_address_p (mode, XEXP (op, 0));
705 return FALSE;
709 /* Accept the floating point constant 1 in the appropriate mode. */
712 const_float_1_operand (op, mode)
713 rtx op;
714 enum machine_mode mode;
716 REAL_VALUE_TYPE d;
717 static REAL_VALUE_TYPE onedf;
718 static REAL_VALUE_TYPE onesf;
719 static int one_initialized;
721 if ((GET_CODE (op) != CONST_DOUBLE)
722 || (mode != GET_MODE (op))
723 || (mode != DFmode && mode != SFmode))
724 return FALSE;
726 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
728 if (! one_initialized)
730 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
731 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
732 one_initialized = TRUE;
735 if (mode == DFmode)
736 return REAL_VALUES_EQUAL (d, onedf);
737 else
738 return REAL_VALUES_EQUAL (d, onesf);
743 fpmem_offset_operand (op, mode)
744 rtx op;
745 enum machine_mode mode ATTRIBUTE_UNUSED;
747 if (GET_CODE (op) == CONST_INT)
748 return xtensa_mem_offset (INTVAL (op), SFmode);
749 return 0;
753 void
754 xtensa_extend_reg (dst, src)
755 rtx dst;
756 rtx src;
758 rtx temp = gen_reg_rtx (SImode);
759 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
761 /* generate paradoxical subregs as needed so that the modes match */
762 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
763 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
765 emit_insn (gen_ashlsi3 (temp, src, shift));
766 emit_insn (gen_ashrsi3 (dst, temp, shift));
770 void
771 xtensa_load_constant (dst, src)
772 rtx dst;
773 rtx src;
775 enum machine_mode mode = GET_MODE (dst);
776 src = force_const_mem (SImode, src);
778 /* PC-relative loads are always SImode so we have to add a SUBREG if that
779 is not the desired mode */
781 if (mode != SImode)
783 if (register_operand (dst, mode))
784 dst = simplify_gen_subreg (SImode, dst, mode, 0);
785 else
787 src = force_reg (SImode, src);
788 src = gen_lowpart_SUBREG (mode, src);
792 emit_move_insn (dst, src);
797 branch_operator (x, mode)
798 rtx x;
799 enum machine_mode mode;
801 if (GET_MODE (x) != mode)
802 return FALSE;
804 switch (GET_CODE (x))
806 case EQ:
807 case NE:
808 case LT:
809 case GE:
810 return TRUE;
811 default:
812 break;
814 return FALSE;
819 ubranch_operator (x, mode)
820 rtx x;
821 enum machine_mode mode;
823 if (GET_MODE (x) != mode)
824 return FALSE;
826 switch (GET_CODE (x))
828 case LTU:
829 case GEU:
830 return TRUE;
831 default:
832 break;
834 return FALSE;
839 boolean_operator (x, mode)
840 rtx x;
841 enum machine_mode mode;
843 if (GET_MODE (x) != mode)
844 return FALSE;
846 switch (GET_CODE (x))
848 case EQ:
849 case NE:
850 return TRUE;
851 default:
852 break;
854 return FALSE;
/* Return nonzero if V is a mask of 1 to 16 consecutive low-order set
   bits (the masks the EXTUI instruction can produce).  Uses 1/0
   return values like the other immediate predicates above.  */

int
xtensa_mask_immediate (int v)
{
#define MAX_MASK_SIZE 16
  int mask_size;

  for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
    {
      /* A zero bit below the top of the mask disqualifies V.  */
      if ((v & 1) == 0)
	return 0;
      v = v >> 1;
      if (v == 0)
	return 1;
    }

  return 0;
}
879 xtensa_mem_offset (v, mode)
880 unsigned v;
881 enum machine_mode mode;
883 switch (mode)
885 case BLKmode:
886 /* Handle the worst case for block moves. See xtensa_expand_block_move
887 where we emit an optimized block move operation if the block can be
888 moved in < "move_ratio" pieces. The worst case is when the block is
889 aligned but has a size of (3 mod 4) (does this happen?) so that the
890 last piece requires a byte load/store. */
891 return (xtensa_uimm8 (v) &&
892 xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
894 case QImode:
895 return xtensa_uimm8 (v);
897 case HImode:
898 return xtensa_uimm8x2 (v);
900 case DFmode:
901 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
903 default:
904 break;
907 return xtensa_uimm8x4 (v);
911 /* Make normal rtx_code into something we can index from an array */
913 static enum internal_test
914 map_test_to_internal_test (test_code)
915 enum rtx_code test_code;
917 enum internal_test test = ITEST_MAX;
919 switch (test_code)
921 default: break;
922 case EQ: test = ITEST_EQ; break;
923 case NE: test = ITEST_NE; break;
924 case GT: test = ITEST_GT; break;
925 case GE: test = ITEST_GE; break;
926 case LT: test = ITEST_LT; break;
927 case LE: test = ITEST_LE; break;
928 case GTU: test = ITEST_GTU; break;
929 case GEU: test = ITEST_GEU; break;
930 case LTU: test = ITEST_LTU; break;
931 case LEU: test = ITEST_LEU; break;
934 return test;
938 /* Generate the code to compare two integer values. The return value is
939 the comparison expression. */
941 static rtx
942 gen_int_relational (test_code, cmp0, cmp1, p_invert)
943 enum rtx_code test_code; /* relational test (EQ, etc) */
944 rtx cmp0; /* first operand to compare */
945 rtx cmp1; /* second operand to compare */
946 int *p_invert; /* whether branch needs to reverse its test */
948 struct cmp_info {
949 enum rtx_code test_code; /* test code to use in insn */
950 int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
951 int const_add; /* constant to add (convert LE -> LT) */
952 int reverse_regs; /* reverse registers in test */
953 int invert_const; /* != 0 if invert value if cmp1 is constant */
954 int invert_reg; /* != 0 if invert value if cmp1 is register */
955 int unsignedp; /* != 0 for unsigned comparisons. */
958 static struct cmp_info info[ (int)ITEST_MAX ] = {
960 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
961 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
963 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
964 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
965 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
966 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
968 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
969 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
970 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
971 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
974 enum internal_test test;
975 enum machine_mode mode;
976 struct cmp_info *p_info;
978 test = map_test_to_internal_test (test_code);
979 if (test == ITEST_MAX)
980 abort ();
982 p_info = &info[ (int)test ];
984 mode = GET_MODE (cmp0);
985 if (mode == VOIDmode)
986 mode = GET_MODE (cmp1);
988 /* Make sure we can handle any constants given to us. */
989 if (GET_CODE (cmp1) == CONST_INT)
991 HOST_WIDE_INT value = INTVAL (cmp1);
992 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
994 /* if the immediate overflows or does not fit in the immediate field,
995 spill it to a register */
997 if ((p_info->unsignedp ?
998 (uvalue + p_info->const_add > uvalue) :
999 (value + p_info->const_add > value)) != (p_info->const_add > 0))
1001 cmp1 = force_reg (mode, cmp1);
1003 else if (!(p_info->const_range_p) (value + p_info->const_add))
1005 cmp1 = force_reg (mode, cmp1);
1008 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
1010 cmp1 = force_reg (mode, cmp1);
1013 /* See if we need to invert the result. */
1014 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
1015 ? p_info->invert_const
1016 : p_info->invert_reg);
1018 /* Comparison to constants, may involve adding 1 to change a LT into LE.
1019 Comparison between two registers, may involve switching operands. */
1020 if (GET_CODE (cmp1) == CONST_INT)
1022 if (p_info->const_add != 0)
1023 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
1026 else if (p_info->reverse_regs)
1028 rtx temp = cmp0;
1029 cmp0 = cmp1;
1030 cmp1 = temp;
1033 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
1037 /* Generate the code to compare two float values. The return value is
1038 the comparison expression. */
1040 static rtx
1041 gen_float_relational (test_code, cmp0, cmp1)
1042 enum rtx_code test_code; /* relational test (EQ, etc) */
1043 rtx cmp0; /* first operand to compare */
1044 rtx cmp1; /* second operand to compare */
1046 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
1047 rtx brtmp;
1048 int reverse_regs, invert;
1050 switch (test_code)
1052 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1053 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1054 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1055 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1056 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1057 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1058 default:
1059 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1060 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
1063 if (reverse_regs)
1065 rtx temp = cmp0;
1066 cmp0 = cmp1;
1067 cmp1 = temp;
1070 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1071 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1073 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
1077 void
1078 xtensa_expand_conditional_branch (operands, test_code)
1079 rtx *operands;
1080 enum rtx_code test_code;
1082 enum cmp_type type = branch_type;
1083 rtx cmp0 = branch_cmp[0];
1084 rtx cmp1 = branch_cmp[1];
1085 rtx cmp;
1086 int invert;
1087 rtx label1, label2;
1089 switch (type)
1091 case CMP_DF:
1092 default:
1093 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1095 case CMP_SI:
1096 invert = FALSE;
1097 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1098 break;
1100 case CMP_SF:
1101 if (!TARGET_HARD_FLOAT)
1102 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1103 invert = FALSE;
1104 cmp = gen_float_relational (test_code, cmp0, cmp1);
1105 break;
1108 /* Generate the branch. */
1110 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1111 label2 = pc_rtx;
1113 if (invert)
1115 label2 = label1;
1116 label1 = pc_rtx;
1119 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1120 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
1121 label1,
1122 label2)));
1126 static rtx
1127 gen_conditional_move (cmp)
1128 rtx cmp;
1130 enum rtx_code code = GET_CODE (cmp);
1131 rtx op0 = branch_cmp[0];
1132 rtx op1 = branch_cmp[1];
1134 if (branch_type == CMP_SI)
1136 /* Jump optimization calls get_condition() which canonicalizes
1137 comparisons like (GE x <const>) to (GT x <const-1>).
1138 Transform those comparisons back to GE, since that is the
1139 comparison supported in Xtensa. We shouldn't have to
1140 transform <LE x const> comparisons, because neither
1141 xtensa_expand_conditional_branch() nor get_condition() will
1142 produce them. */
1144 if ((code == GT) && (op1 == constm1_rtx))
1146 code = GE;
1147 op1 = const0_rtx;
1149 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1151 if (boolean_operator (cmp, VOIDmode))
1153 /* swap the operands to make const0 second */
1154 if (op0 == const0_rtx)
1156 op0 = op1;
1157 op1 = const0_rtx;
1160 /* if not comparing against zero, emit a comparison (subtract) */
1161 if (op1 != const0_rtx)
1163 op0 = expand_binop (SImode, sub_optab, op0, op1,
1164 0, 0, OPTAB_LIB_WIDEN);
1165 op1 = const0_rtx;
1168 else if (branch_operator (cmp, VOIDmode))
1170 /* swap the operands to make const0 second */
1171 if (op0 == const0_rtx)
1173 op0 = op1;
1174 op1 = const0_rtx;
1176 switch (code)
1178 case LT: code = GE; break;
1179 case GE: code = LT; break;
1180 default: abort ();
1184 if (op1 != const0_rtx)
1185 return 0;
1187 else
1188 return 0;
1190 return gen_rtx (code, VOIDmode, op0, op1);
1193 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1194 return gen_float_relational (code, op0, op1);
1196 return 0;
1201 xtensa_expand_conditional_move (operands, isflt)
1202 rtx *operands;
1203 int isflt;
1205 rtx cmp;
1206 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1208 if (!(cmp = gen_conditional_move (operands[1])))
1209 return 0;
1211 if (isflt)
1212 gen_fn = (branch_type == CMP_SI
1213 ? gen_movsfcc_internal0
1214 : gen_movsfcc_internal1);
1215 else
1216 gen_fn = (branch_type == CMP_SI
1217 ? gen_movsicc_internal0
1218 : gen_movsicc_internal1);
1220 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1221 operands[2], operands[3], cmp));
1222 return 1;
1227 xtensa_expand_scc (operands)
1228 rtx *operands;
1230 rtx dest = operands[0];
1231 rtx cmp = operands[1];
1232 rtx one_tmp, zero_tmp;
1233 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1235 if (!(cmp = gen_conditional_move (cmp)))
1236 return 0;
1238 one_tmp = gen_reg_rtx (SImode);
1239 zero_tmp = gen_reg_rtx (SImode);
1240 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1241 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1243 gen_fn = (branch_type == CMP_SI
1244 ? gen_movsicc_internal0
1245 : gen_movsicc_internal1);
1246 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1247 return 1;
1251 /* Emit insns to move operands[1] into operands[0].
1253 Return 1 if we have written out everything that needs to be done to
1254 do the move. Otherwise, return 0 and the caller will emit the move
1255 normally. */
1258 xtensa_emit_move_sequence (operands, mode)
1259 rtx *operands;
1260 enum machine_mode mode;
1262 if (CONSTANT_P (operands[1])
1263 && GET_CODE (operands[1]) != CONSTANT_P_RTX
1264 && (GET_CODE (operands[1]) != CONST_INT
1265 || !xtensa_simm12b (INTVAL (operands[1]))))
1267 xtensa_load_constant (operands[0], operands[1]);
1268 return 1;
1271 if (!(reload_in_progress | reload_completed))
1273 if (!xtensa_valid_move (mode, operands))
1274 operands[1] = force_reg (mode, operands[1]);
1276 /* Check if this move is copying an incoming argument in a7. If
1277 so, emit the move, followed by the special "set_frame_ptr"
1278 unspec_volatile insn, at the very beginning of the function.
1279 This is necessary because the register allocator will ignore
1280 conflicts with a7 and may assign some other pseudo to a7. If
1281 that pseudo was assigned prior to this move, it would clobber
1282 the incoming argument in a7. By copying the argument out of
1283 a7 as the very first thing, and then immediately following
1284 that with an unspec_volatile to keep the scheduler away, we
1285 should avoid any problems. */
1287 if (a7_overlap_mentioned_p (operands[1]))
1289 rtx mov;
1290 switch (mode)
1292 case SImode:
1293 mov = gen_movsi_internal (operands[0], operands[1]);
1294 break;
1295 case HImode:
1296 mov = gen_movhi_internal (operands[0], operands[1]);
1297 break;
1298 case QImode:
1299 mov = gen_movqi_internal (operands[0], operands[1]);
1300 break;
1301 default:
1302 abort ();
1305 /* Insert the instructions before any other argument copies.
1306 (The set_frame_ptr insn comes _after_ the move, so push it
1307 out first.) */
1308 push_topmost_sequence ();
1309 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1310 emit_insn_after (mov, get_insns ());
1311 pop_topmost_sequence ();
1313 return 1;
1317 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1318 instruction won't be recognized after reload. So we remove the
1319 subreg and adjust mem accordingly. */
1320 if (reload_in_progress)
1322 operands[0] = fixup_subreg_mem (operands[0]);
1323 operands[1] = fixup_subreg_mem (operands[1]);
1325 return 0;
1328 static rtx
1329 fixup_subreg_mem (x)
1330 rtx x;
1332 if (GET_CODE (x) == SUBREG
1333 && GET_CODE (SUBREG_REG (x)) == REG
1334 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1336 rtx temp =
1337 gen_rtx_SUBREG (GET_MODE (x),
1338 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1339 SUBREG_BYTE (x));
1340 x = alter_subreg (&temp);
1342 return x;
1346 /* Try to expand a block move operation to an RTL block move instruction.
1347 If not optimizing or if the block size is not a constant or if the
1348 block is small, the expansion fails and GCC falls back to calling
1349 memcpy().
1351 operands[0] is the destination
1352 operands[1] is the source
1353 operands[2] is the length
1354 operands[3] is the alignment */
1357 xtensa_expand_block_move (operands)
1358 rtx *operands;
1360 rtx dest = operands[0];
1361 rtx src = operands[1];
1362 int bytes = INTVAL (operands[2]);
1363 int align = XINT (operands[3], 0);
1364 int num_pieces, move_ratio;
1366 /* If this is not a fixed size move, just call memcpy */
1367 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1368 return 0;
1370 /* Anything to move? */
1371 if (bytes <= 0)
1372 return 1;
1374 if (align > MOVE_MAX)
1375 align = MOVE_MAX;
1377 /* decide whether to expand inline based on the optimization level */
1378 move_ratio = 4;
1379 if (optimize > 2)
1380 move_ratio = LARGEST_MOVE_RATIO;
1381 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1382 if (num_pieces >= move_ratio)
1383 return 0;
1385 /* make sure the memory addresses are valid */
1386 operands[0] = validize_mem (dest);
1387 operands[1] = validize_mem (src);
1389 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1390 operands[2], operands[3]));
1391 return 1;
1395 /* Emit a sequence of instructions to implement a block move, trying
1396 to hide load delay slots as much as possible. Load N values into
1397 temporary registers, store those N values, and repeat until the
1398 complete block has been moved. N=delay_slots+1 */
/* One recorded load or store of a block move: the assembler template
   text plus its two operands (scratch register and memory reference).  */
struct meminsnbuf {
  char template[30];	/* e.g. "l32i\t%0, %1" */
  rtx operands[2];	/* [0] = register, [1] = memory */
};
/* Output the assembly for a block move in chunks of delay_slots+1
   loads followed by the matching stores, so each load's delay slots
   are filled by the subsequent loads.  Called at final output time,
   so instructions are emitted as text via output_asm_insn.  */
void
xtensa_emit_block_move (operands, tmpregs, delay_slots)
     rtx *operands;	/* dest mem, src mem, length (CONST_INT), alignment */
     rtx *tmpregs;	/* at least delay_slots+1 scratch registers */
     int delay_slots;	/* number of load delay slots to hide */
{
  rtx dest = operands[0];
  rtx src = operands[1];
  int bytes = INTVAL (operands[2]);
  int align = XINT (operands[3], 0);
  rtx from_addr = XEXP (src, 0);
  rtx to_addr = XEXP (dest, 0);
  int from_struct = MEM_IN_STRUCT_P (src);
  int to_struct = MEM_IN_STRUCT_P (dest);
  int offset = 0;
  int chunk_size, item_size;
  struct meminsnbuf *ldinsns, *stinsns;
  const char *ldname, *stname;
  enum machine_mode mode;

  if (align > MOVE_MAX)
    align = MOVE_MAX;
  item_size = align;
  /* N loads followed by N stores per chunk, N = delay_slots + 1.  */
  chunk_size = delay_slots + 1;

  ldinsns = (struct meminsnbuf *)
    alloca (chunk_size * sizeof (struct meminsnbuf));
  stinsns = (struct meminsnbuf *)
    alloca (chunk_size * sizeof (struct meminsnbuf));

  /* Widest mode usable at this alignment, and its ld/st opcodes.  */
  mode = xtensa_find_mode_for_size (item_size);
  item_size = GET_MODE_SIZE (mode);
  ldname = xtensa_ld_opcodes[(int) mode];
  stname = xtensa_st_opcodes[(int) mode];

  while (bytes > 0)
    {
      int n;

      for (n = 0; n < chunk_size; n++)
	{
	  rtx addr, mem;

	  if (bytes == 0)
	    {
	      /* Ran out of data mid-chunk; emit only what we recorded.  */
	      chunk_size = n;
	      break;
	    }

	  if (bytes < item_size)
	    {
	      /* find a smaller item_size which we can load & store */
	      item_size = bytes;
	      mode = xtensa_find_mode_for_size (item_size);
	      item_size = GET_MODE_SIZE (mode);
	      ldname = xtensa_ld_opcodes[(int) mode];
	      stname = xtensa_st_opcodes[(int) mode];
	    }

	  /* record the load instruction opcode and operands */
	  addr = plus_constant (from_addr, offset);
	  mem = gen_rtx_MEM (mode, addr);
	  if (! memory_address_p (mode, addr))
	    abort ();
	  MEM_IN_STRUCT_P (mem) = from_struct;
	  ldinsns[n].operands[0] = tmpregs[n];
	  ldinsns[n].operands[1] = mem;
	  sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);

	  /* record the store instruction opcode and operands */
	  addr = plus_constant (to_addr, offset);
	  mem = gen_rtx_MEM (mode, addr);
	  if (! memory_address_p (mode, addr))
	    abort ();
	  MEM_IN_STRUCT_P (mem) = to_struct;
	  stinsns[n].operands[0] = tmpregs[n];
	  stinsns[n].operands[1] = mem;
	  sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);

	  offset += item_size;
	  bytes -= item_size;
	}

      /* now output the loads followed by the stores */
      for (n = 0; n < chunk_size; n++)
	output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
      for (n = 0; n < chunk_size; n++)
	output_asm_insn (stinsns[n].template, stinsns[n].operands);
    }
}
/* Return the widest integer mode whose size is at most ITEM_SIZE bytes
   and for which both a load and a store opcode are available in the
   xtensa_ld_opcodes/xtensa_st_opcodes tables.  Aborts if not even the
   narrowest integer mode fits.  */
static enum machine_mode
xtensa_find_mode_for_size (item_size)
     unsigned item_size;
{
  enum machine_mode mode, tmode;

  while (1)
    {
      mode = VOIDmode;

      /* find mode closest to but not bigger than item_size */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) <= item_size)
	  mode = tmode;
      if (mode == VOIDmode)
	abort ();

      item_size = GET_MODE_SIZE (mode);

      /* Accept the mode only if it can be both loaded and stored.  */
      if (xtensa_ld_opcodes[(int) mode]
	  && xtensa_st_opcodes[(int) mode])
	break;

      /* cannot load & store this mode; try something smaller */
      item_size -= 1;
    }

  return mode;
}
/* Expand a nonlocal goto as a call to the libgcc helper
   __xtensa_nonlocal_goto, passing the containing function's frame
   pointer and the handler address (with the virtual frame pointer
   rewritten to the real containing frame pointer).  */
void
xtensa_expand_nonlocal_goto (operands)
     rtx *operands;	/* [1] = handler label, [3] = containing frame ptr */
{
  rtx goto_handler = operands[1];
  rtx containing_fp = operands[3];

  /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
     is too big to generate in-line */

  if (GET_CODE (containing_fp) != REG)
    containing_fp = force_reg (Pmode, containing_fp);

  /* The handler references its frame through virtual_stack_vars_rtx;
     substitute the actual containing frame pointer.  */
  goto_handler = replace_rtx (copy_rtx (goto_handler),
			      virtual_stack_vars_rtx,
			      containing_fp);

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
		     0, VOIDmode, 2,
		     containing_fp, Pmode,
		     goto_handler, Pmode);
}
/* Allocate a zero-initialized, garbage-collected machine_function
   record for the current function (installed via init_machine_status).  */
static struct machine_function *
xtensa_init_machine_status ()
{
  return ggc_alloc_cleared (sizeof (struct machine_function));
}
/* Called when the current function accesses an outer frame
   (__builtin_frame_address / __builtin_return_address with level > 0).
   Forces a frame pointer and spills the register windows so the outer
   frames are visible in memory.  */
void
xtensa_setup_frame_addresses ()
{
  /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
  cfun->machine->accesses_prev_frame = 1;

  /* Spill all live register windows to the stack.  */
  emit_library_call
    (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
     0, VOIDmode, 0);
}
1572 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1573 a comment showing where the end of the loop is. However, if there is a
1574 label or a branch at the end of the loop then we need to place a nop
1575 there. If the loop ends with a label we need the nop so that branches
1576 targetting that label will target the nop (and thus remain in the loop),
1577 instead of targetting the instruction after the loop (and thus exiting
1578 the loop). If the loop ends with a branch, we need the nop in case the
1579 branch is targetting a location inside the loop. When the branch
1580 executes it will cause the loop count to be decremented even if it is
1581 taken (because it is the last instruction in the loop), so we need to
1582 nop after the branch to prevent the loop count from being decremented
1583 when the branch is taken. */
1585 void
1586 xtensa_emit_loop_end (insn, operands)
1587 rtx insn;
1588 rtx *operands;
1590 char done = 0;
1592 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1594 switch (GET_CODE (insn))
1596 case NOTE:
1597 case BARRIER:
1598 break;
1600 case CODE_LABEL:
1601 output_asm_insn ("nop.n", operands);
1602 done = 1;
1603 break;
1605 default:
1607 rtx body = PATTERN (insn);
1609 if (GET_CODE (body) == JUMP_INSN)
1611 output_asm_insn ("nop.n", operands);
1612 done = 1;
1614 else if ((GET_CODE (body) != USE)
1615 && (GET_CODE (body) != CLOBBER))
1616 done = 1;
1618 break;
1622 output_asm_insn ("# loop end for %0", operands);
1626 char *
1627 xtensa_emit_call (callop, operands)
1628 int callop;
1629 rtx *operands;
1631 static char result[64];
1632 rtx tgt = operands[callop];
1634 if (GET_CODE (tgt) == CONST_INT)
1635 sprintf (result, "call8\t0x%x", INTVAL (tgt));
1636 else if (register_operand (tgt, VOIDmode))
1637 sprintf (result, "callx8\t%%%d", callop);
1638 else
1639 sprintf (result, "call8\t%%%d", callop);
1641 return result;
1645 /* Return the stabs register number to use for 'regno'. */
int
xtensa_dbx_register_number (regno)
     int regno;
{
  int first = -1;	/* base of the debug-number range; -1 = unknown */

  if (GP_REG_P (regno))
    {
      regno -= GP_REG_FIRST;
      first = 0;
    }
  else if (BR_REG_P (regno))
    {
      regno -= BR_REG_FIRST;
      first = 16;
    }
  else if (FP_REG_P (regno))
    {
      regno -= FP_REG_FIRST;
      /* The current numbering convention is that TIE registers are
	 numbered in libcc order beginning with 256.  We can't guarantee
	 that the FP registers will come first, so the following is just
	 a guess.  It seems like we should make a special case for FP
	 registers and give them fixed numbers < 256.  */
      first = 256;
    }
  else if (ACC_REG_P (regno))
    {
      /* NOTE(review): with first = 0 and regno = -1 this returns -1
	 for the MAC16 accumulator -- looks deliberate (no stabs number),
	 but confirm against the debug-info consumers.  */
      first = 0;
      regno = -1;
    }

  /* When optimizing, we sometimes get asked about pseudo-registers
     that don't represent hard registers.  Return 0 for these.  */
  if (first == -1)
    return 0;

  return first + regno;
}
1685 /* Argument support functions. */
1687 /* Initialize CUMULATIVE_ARGS for a function. */
/* Initialize CUMULATIVE_ARGS: no argument words consumed yet.  */
void
init_cumulative_args (cum, fntype, libname)
     CUMULATIVE_ARGS *cum;		/* argument info to initialize */
     tree fntype ATTRIBUTE_UNUSED;	/* tree ptr for function decl */
     rtx libname ATTRIBUTE_UNUSED;	/* SYMBOL_REF of library name or 0 */
{
  cum->arg_words = 0;
}
1698 /* Advance the argument to the next argument position. */
1700 void
1701 function_arg_advance (cum, mode, type)
1702 CUMULATIVE_ARGS *cum; /* current arg information */
1703 enum machine_mode mode; /* current arg mode */
1704 tree type; /* type of the argument or 0 if lib support */
1706 int words, max;
1707 int *arg_words;
1709 arg_words = &cum->arg_words;
1710 max = MAX_ARGS_IN_REGISTERS;
1712 words = (((mode != BLKmode)
1713 ? (int) GET_MODE_SIZE (mode)
1714 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1716 if ((*arg_words + words > max) && (*arg_words < max))
1717 *arg_words = max;
1719 *arg_words += words;
1723 /* Return an RTL expression containing the register for the given mode,
1724 or 0 if the argument is to be passed on the stack. */
rtx
function_arg (cum, mode, type, incoming_p)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;		/* type of the argument or 0 if lib support */
     int incoming_p;		/* computing the incoming registers? */
{
  int regbase, words, max;
  int *arg_words;
  int regno;
  enum machine_mode result_mode;

  arg_words = &cum->arg_words;
  regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
  max = MAX_ARGS_IN_REGISTERS;

  /* Argument size in whole words.  */
  words = (((mode != BLKmode)
	    ? (int) GET_MODE_SIZE (mode)
	    : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Double-word-aligned types start at an even word index.  */
  if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
    *arg_words += (*arg_words & 1);

  /* Doesn't fit in the remaining registers: pass on the stack.  */
  if (*arg_words + words > max)
    return (rtx)0;

  regno = regbase + *arg_words;
  result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);

  /* We need to make sure that references to a7 are represented with
     rtx that is not equal to hard_frame_pointer_rtx.  For BLKmode and
     modes bigger than 2 words (because we only have patterns for
     modes of 2 words or smaller), we can't control the expansion
     unless we explicitly list the individual registers in a PARALLEL.  */

  if ((mode == BLKmode || words > 2)
      && regno < A7_REG
      && regno + words > A7_REG)
    {
      rtx result;
      int n;

      /* One EXPR_LIST entry per word, using gen_raw_REG so the a7
	 reference is distinct from hard_frame_pointer_rtx.  */
      result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
      for (n = 0; n < words; n++)
	{
	  XVECEXP (result, 0, n) =
	    gen_rtx_EXPR_LIST (VOIDmode,
			       gen_raw_REG (SImode, regno + n),
			       GEN_INT (n * UNITS_PER_WORD));
	}
      return result;
    }

  return gen_raw_REG (result_mode, regno);
}
/* Validate option combinations and initialize per-target tables:
   block-move ld/st opcode names, constraint-letter register classes,
   the hard-regno/mode legitimacy table, the machine_function hook,
   and the PIC default.  */
void
override_options ()
{
  int regno;
  enum machine_mode mode;

  if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
    error ("boolean registers required for the floating-point option");

  /* set up the tables of ld/st opcode names for block moves */
  xtensa_ld_opcodes[(int) SImode] = "l32i";
  xtensa_ld_opcodes[(int) HImode] = "l16ui";
  xtensa_ld_opcodes[(int) QImode] = "l8ui";
  xtensa_st_opcodes[(int) SImode] = "s32i";
  xtensa_st_opcodes[(int) HImode] = "s16i";
  xtensa_st_opcodes[(int) QImode] = "s8i";

  /* Map constraint letters to register classes; optional-feature
     letters collapse to NO_REGS when the feature is absent.  */
  xtensa_char_to_class['q'] = SP_REG;
  xtensa_char_to_class['a'] = GR_REGS;
  xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
  xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
  xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
  xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
  xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
  xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
  xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);

  /* Set up array giving whether a given register can hold a given mode. */
  for (mode = VOIDmode;
       mode != MAX_MACHINE_MODE;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int size = GET_MODE_SIZE (mode);
      enum mode_class class = GET_MODE_CLASS (mode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  int temp;

	  if (ACC_REG_P (regno))
	    temp = (TARGET_MAC16 &&
		    (class == MODE_INT) && (size <= UNITS_PER_WORD));
	  else if (GP_REG_P (regno))
	    /* Multi-word values must start at an even register.  */
	    temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
	  else if (FP_REG_P (regno))
	    temp = (TARGET_HARD_FLOAT && (mode == SFmode));
	  else if (BR_REG_P (regno))
	    temp = (TARGET_BOOLEANS && (mode == CCmode));
	  else
	    temp = FALSE;

	  xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
	}
    }

  init_machine_status = xtensa_init_machine_status;

  /* Check PIC settings.  There's no need for -fPIC on Xtensa and
     some targets need to always use PIC.  */
  if (flag_pic > 1 || (XTENSA_ALWAYS_PIC))
    flag_pic = 1;
}
1847 /* A C compound statement to output to stdio stream STREAM the
1848 assembler syntax for an instruction operand X. X is an RTL
1849 expression.
1851 CODE is a value that can be used to specify one of several ways
1852 of printing the operand. It is used when identical operands
1853 must be printed differently depending on the context. CODE
1854 comes from the '%' specification that was used to request
1855 printing of the operand. If the specification was just '%DIGIT'
1856 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1857 is the ASCII code for LTR.
1859 If X is a register, this macro should print the register's name.
1860 The names can be found in an array 'reg_names' whose type is
1861 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1863 When the machine description has a specification '%PUNCT' (a '%'
1864 followed by a punctuation character), this macro is called with
1865 a null pointer for X and the punctuation character for CODE.
1867 'a', 'c', 'l', and 'n' are reserved.
1869 The Xtensa specific codes are:
1871 'd' CONST_INT, print as signed decimal
1872 'x' CONST_INT, print as signed hexadecimal
1873 'K' CONST_INT, print number of bits in mask for EXTUI
1874 'R' CONST_INT, print (X & 0x1f)
1875 'L' CONST_INT, print ((32 - X) & 0x1f)
1876 'D' REG, print second register of double-word register operand
1877 'N' MEM, print address of next word following a memory operand
1878 'v' MEM, if memory reference is volatile, output a MEMW before it
/* Print VAL to FILE as a "nice" hexadecimal constant: single-digit
   magnitudes in decimal, everything else as (possibly negated) hex.  */
static void
printx (file, val)
     FILE *file;
     signed int val;
{
  /* print a hexadecimal value in a nice way */
  if ((val > -0xa) && (val < 0xa))
    fprintf (file, "%d", val);
  else if (val < 0)
    /* Negate in unsigned arithmetic: plain -val is undefined behavior
       (signed overflow) when val == INT_MIN.  */
    fprintf (file, "-0x%x", 0U - (unsigned int) val);
  else
    fprintf (file, "0x%x", val);
}
/* Implement PRINT_OPERAND (see the block comment above for the
   Xtensa-specific operand letters).  */
void
print_operand (file, op, letter)
     FILE *file;		/* file to write to */
     rtx op;		/* operand to print */
     int letter;		/* %<letter> or 0 */
{
  enum rtx_code code;

  if (! op)
    error ("PRINT_OPERAND null pointer");

  code = GET_CODE (op);
  switch (code)
    {
    case REG:
    case SUBREG:
      {
	int regnum = xt_true_regnum (op);
	/* 'D': second register of a double-word operand.  */
	if (letter == 'D')
	  regnum++;
	fprintf (file, "%s", reg_names[regnum]);
	break;
      }

    case MEM:
      /* For a volatile memory reference, emit a MEMW before the
	 load or store.  */
      if (letter == 'v')
	{
	  if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
	    fprintf (file, "memw\n\t");
	  break;
	}
      else if (letter == 'N')
	{
	  /* 'N': address of the word following this memory operand;
	     only double-word modes have a following word.  */
	  enum machine_mode mode;
	  switch (GET_MODE (op))
	    {
	    case DFmode: mode = SFmode; break;
	    case DImode: mode = SImode; break;
	    default: abort ();
	    }
	  op = adjust_address (op, mode, 4);
	}

      output_address (XEXP (op, 0));
      break;

    case CONST_INT:
      switch (letter)
	{
	case 'K':
	  {
	    /* Number of low one-bits; valid EXTUI masks are a solid
	       run of 1..16 low bits and nothing else.  */
	    int num_bits = 0;
	    unsigned val = INTVAL (op);
	    while (val & 1)
	      {
		num_bits += 1;
		val = val >> 1;
	      }
	    if ((val != 0) || (num_bits == 0) || (num_bits > 16))
	      fatal_insn ("invalid mask", op);

	    fprintf (file, "%d", num_bits);
	    break;
	  }

	case 'L':
	  fprintf (file, "%d", (32 - INTVAL (op)) & 0x1f);
	  break;

	case 'R':
	  fprintf (file, "%d", INTVAL (op) & 0x1f);
	  break;

	case 'x':
	  printx (file, INTVAL (op));
	  break;

	case 'd':
	default:
	  fprintf (file, "%d", INTVAL (op));
	  break;
	}
      break;

    default:
      /* Symbols, labels, and other constants.  */
      output_addr_const (file, op);
    }
}
1989 /* A C compound statement to output to stdio stream STREAM the
1990 assembler syntax for an instruction operand that is a memory
1991 reference whose address is ADDR. ADDR is an RTL expression. */
/* Implement PRINT_OPERAND_ADDRESS.  Xtensa addresses are printed as
   "<base-reg>, <offset>" (offset defaults to 0 for a bare register);
   anything else must be a constant address.  */
void
print_operand_address (file, addr)
     FILE *file;
     rtx addr;
{
  if (!addr)
    error ("PRINT_OPERAND_ADDRESS, null pointer");

  switch (GET_CODE (addr))
    {
    default:
      fatal_insn ("invalid address", addr);
      break;

    case REG:
      fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
      break;

    case PLUS:
      {
	rtx reg = (rtx)0;
	rtx offset = (rtx)0;
	rtx arg0 = XEXP (addr, 0);
	rtx arg1 = XEXP (addr, 1);

	/* The register may appear on either side of the PLUS.  */
	if (GET_CODE (arg0) == REG)
	  {
	    reg = arg0;
	    offset = arg1;
	  }
	else if (GET_CODE (arg1) == REG)
	  {
	    reg = arg1;
	    offset = arg0;
	  }
	else
	  fatal_insn ("no register in address", addr);

	if (CONSTANT_P (offset))
	  {
	    fprintf (file, "%s, ", reg_names [REGNO (reg)]);
	    output_addr_const (file, offset);
	  }
	else
	  fatal_insn ("address offset not a constant", addr);
	break;
      }

    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
      output_addr_const (file, addr);
      break;
    }
}
2051 /* Emit either a label, .comm, or .lcomm directive. */
2053 void
2054 xtensa_declare_object (file, name, init_string, final_string, size)
2055 FILE *file;
2056 char *name;
2057 char *init_string;
2058 char *final_string;
2059 int size;
2061 fputs (init_string, file); /* "", "\t.comm\t", or "\t.lcomm\t" */
2062 assemble_name (file, name);
2063 fprintf (file, final_string, size); /* ":\n", ",%u\n", ",%u\n" */
/* Output a constant-pool entry as a ".literal" directive: one or two
   32-bit words depending on the mode.  LABELNO names the entry .LCn.  */
void
xtensa_output_literal (file, x, mode, labelno)
     FILE *file;
     rtx x;
     enum machine_mode mode;
     int labelno;
{
  long value_long[2];
  REAL_VALUE_TYPE r;
  int size;

  fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_FLOAT:
      if (GET_CODE (x) != CONST_DOUBLE)
	abort ();

      /* Convert to the target's binary representation.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (mode)
	{
	case SFmode:
	  REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
	  fprintf (file, "0x%08lx\n", value_long[0]);
	  break;

	case DFmode:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
	  fprintf (file, "0x%08lx, 0x%08lx\n",
		   value_long[0], value_long[1]);
	  break;

	default:
	  abort ();
	}
      break;

    case MODE_INT:
    case MODE_PARTIAL_INT:
      size = GET_MODE_SIZE (mode);
      if (size == 4)
	{
	  output_addr_const (file, x);
	  fputs ("\n", file);
	}
      else if (size == 8)
	{
	  /* Two words, low subword first.  */
	  output_addr_const (file, operand_subword (x, 0, 0, DImode));
	  fputs (", ", file);
	  output_addr_const (file, operand_subword (x, 1, 0, DImode));
	  fputs ("\n", file);
	}
      else
	abort ();
      break;

    default:
      abort ();
    }
}
2131 /* Return the bytes needed to compute the frame pointer from the current
2132 stack pointer. */
/* Bytes per stack-alignment unit, and a macro to round LOC up to it.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
#define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))

/* Compute the total frame size for the current function, caching it
   in xtensa_current_frame_size: local variables, static chain slot,
   outgoing argument area, and the register-window save area.  */
long
compute_frame_size (size)
     int size;			/* # of var. bytes allocated */
{
  /* add space for the incoming static chain value */
  if (current_function_needs_context)
    size += (1 * UNITS_PER_WORD);

  xtensa_current_frame_size =
    XTENSA_STACK_ALIGN (size
			+ current_function_outgoing_args_size
			+ (WINDOW_SIZE * UNITS_PER_WORD));
  return xtensa_current_frame_size;
}
2154 xtensa_frame_pointer_required ()
2156 /* The code to expand builtin_frame_addr and builtin_return_addr
2157 currently uses the hard_frame_pointer instead of frame_pointer.
2158 This seems wrong but maybe it's necessary for other architectures.
2159 This function is derived from the i386 code. */
2161 if (cfun->machine->accesses_prev_frame)
2162 return 1;
2164 return 0;
/* Machine-dependent reorg pass: pin the frame-size constant in the
   literal pool when "entry" cannot encode it directly, and rewrite
   pre-set_frame_ptr references to the hard frame pointer so they use
   the stack pointer instead (the frame pointer is not valid until
   the set_frame_ptr insn runs).  */
void
xtensa_reorg (first)
     rtx first;		/* first insn of the function */
{
  rtx insn, set_frame_ptr_insn = 0;

  /* "entry" takes a 12-bit immediate scaled by 8; larger frames load
     the size from the literal pool (see xtensa_function_prologue).  */
  unsigned long tsize = compute_frame_size (get_frame_size ());
  if (tsize < (1 << (12+3)))
    frame_size_const = 0;
  else
    {
      frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));;

      /* make sure the constant is used so it doesn't get eliminated
	 from the constant pool */
      emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
    }

  if (!frame_pointer_needed)
    return;

  /* Search all instructions, looking for the insn that sets up the
     frame pointer.  This search will fail if the function does not
     have an incoming argument in $a7, but in that case, we can just
     set up the frame pointer at the very beginning of the
     function.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      if (!INSN_P (insn))
	continue;

      pat = PATTERN (insn);
      if (GET_CODE (pat) == UNSPEC_VOLATILE
	  && (XINT (pat, 1) == UNSPECV_SET_FP))
	{
	  set_frame_ptr_insn = insn;
	  break;
	}
    }

  if (set_frame_ptr_insn)
    {
      /* for all instructions prior to set_frame_ptr_insn, replace
	 hard_frame_pointer references with stack_pointer */
      for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
	{
	  if (INSN_P (insn))
	    PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
					  hard_frame_pointer_rtx,
					  stack_pointer_rtx);
	}
    }
  else
    {
      /* emit the frame pointer move immediately after the NOTE that starts
	 the function */
      emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
				  stack_pointer_rtx), first);
    }
}
2233 /* Set up the stack and frame (if desired) for the function. */
/* Output the function prologue: a ".frame" annotation and the windowed
   "entry" instruction.  Frames too large for entry's immediate get a
   minimal 16-byte entry followed by an explicit stack adjustment using
   the literal-pool constant prepared by xtensa_reorg.  */
void
xtensa_function_prologue (file, size)
     FILE *file;
     int size ATTRIBUTE_UNUSED;
{
  unsigned long tsize = compute_frame_size (get_frame_size ());

  if (frame_pointer_needed)
    fprintf (file, "\t.frame\ta7, %ld\n", tsize);
  else
    fprintf (file, "\t.frame\tsp, %ld\n", tsize);

  /* entry encodes a 12-bit immediate scaled by 8 (matches the test in
     xtensa_reorg).  */
  if (tsize < (1 << (12+3)))
    {
      fprintf (file, "\tentry\tsp, %ld\n", tsize);
    }
  else
    {
      fprintf (file, "\tentry\tsp, 16\n");

      /* use a8 as a temporary since a0-a7 may be live */
      fprintf (file, "\tl32r\ta8, ");
      print_operand (file, frame_size_const, 0);
      fprintf (file, "\n\tsub\ta8, sp, a8\n");
      fprintf (file, "\tmovsp\tsp, a8\n");
    }
}
2265 /* Do any necessary cleanup after a function to restore
2266 stack, frame, and regs. */
/* Output the function epilogue: a windowed return, unless the function
   already ends in a barrier (i.e., control cannot fall off the end).  */
void
xtensa_function_epilogue (file, size)
     FILE *file;
     int size ATTRIBUTE_UNUSED;
{
  rtx insn = get_last_insn ();
  /* Skip a trailing NOTE, then: if the last real insn is a BARRIER,
     we don't have to write anything.  */
  if (GET_CODE (insn) == NOTE)
    insn = prev_nonnote_insn (insn);
  if (insn == 0 || GET_CODE (insn) != BARRIER)
    fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");

  /* Frame size is per-function; reset the cached value.  */
  xtensa_current_frame_size = 0;
}
2284 /* Create the va_list data type.
2285 This structure is set up by __builtin_saveregs. The __va_reg
2286 field points to a stack-allocated region holding the contents of the
2287 incoming argument registers. The __va_ndx field is an index initialized
2288 to the position of the first unnamed (variable) argument. This same index
2289 is also used to address the arguments passed in memory. Thus, the
2290 __va_stk field is initialized to point to the position of the first
2291 argument in memory offset to account for the arguments passed in
2292 registers. E.G., if there are 6 argument registers, and each register is
2293 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2294 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2295 argument word N for N >= 6. */
/* Build the va_list record type: { void *__va_stk; void *__va_reg;
   int __va_ndx; } -- see the block comment above for field semantics.  */
tree
xtensa_build_va_list (void)
{
  tree f_stk, f_reg, f_ndx, record, type_decl;

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
		      ptr_type_node);
  f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
		      ptr_type_node);
  f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
		      integer_type_node);

  DECL_FIELD_CONTEXT (f_stk) = record;
  DECL_FIELD_CONTEXT (f_reg) = record;
  DECL_FIELD_CONTEXT (f_ndx) = record;

  /* Chain the fields in declaration order and attach the type name.  */
  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_stk;
  TREE_CHAIN (f_stk) = f_reg;
  TREE_CHAIN (f_reg) = f_ndx;

  layout_type (record);
  return record;
}
2327 /* Save the incoming argument registers on the stack. Returns the
2328 address of the saved registers. */
rtx
xtensa_builtin_saveregs ()
{
  rtx gp_regs, dest;
  int arg_words = current_function_arg_words;	/* words used by named args */
  int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
  int i;

  /* All argument registers already consumed by named arguments.  */
  if (gp_left == 0)
    return const0_rtx;

  /* allocate the general-purpose register space */
  gp_regs = assign_stack_local
    (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
  set_mem_alias_set (gp_regs, get_varargs_alias_set ());

  /* Now store the incoming registers, starting at the slot for the
     first unnamed argument.  */
  dest = change_address (gp_regs, SImode,
			 plus_constant (XEXP (gp_regs, 0),
					arg_words * UNITS_PER_WORD));

  /* Note: Don't use move_block_from_reg() here because the incoming
     argument in a7 cannot be represented by hard_frame_pointer_rtx.
     Instead, call gen_raw_REG() directly so that we get a distinct
     instance of (REG:SI 7).  */
  for (i = 0; i < gp_left; i++)
    {
      emit_move_insn (operand_subword (dest, i, 1, BLKmode),
		      gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
    }

  return XEXP (gp_regs, 0);
}
2365 /* Implement `va_start' for varargs and stdarg. We look at the
2366 current function to fill in an initial va_list. */
void
xtensa_va_start (stdarg_p, valist, nextarg)
     int stdarg_p ATTRIBUTE_UNUSED;
     tree valist;
     rtx nextarg ATTRIBUTE_UNUSED;
{
  tree f_stk, stk;
  tree f_reg, reg;
  tree f_ndx, ndx;
  tree t, u;
  int arg_words;

  arg_words = current_function_args_info.arg_words;

  /* Fields of the va_list record built by xtensa_build_va_list,
     in declaration order.  */
  f_stk = TYPE_FIELDS (va_list_type_node);
  f_reg = TREE_CHAIN (f_stk);
  f_ndx = TREE_CHAIN (f_reg);

  stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
  reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
  ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);

  /* Call __builtin_saveregs; save the result in __va_reg */
  current_function_arg_words = arg_words;
  u = make_tree (ptr_type_node, expand_builtin_saveregs ());
  t = build (MODIFY_EXPR, ptr_type_node, reg, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
  u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
  u = fold (build (PLUS_EXPR, ptr_type_node, u,
		   build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
  t = build (MODIFY_EXPR, ptr_type_node, stk, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Set the __va_ndx member: index of the first unnamed argument.  */
  u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
  t = build (MODIFY_EXPR, integer_type_node, ndx, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
2413 /* Implement `va_arg'. */
rtx
xtensa_va_arg (valist, type)
     tree valist, type;
{
  tree f_stk, stk;
  tree f_reg, reg;
  tree f_ndx, ndx;
  tree tmp, addr_tree, type_size;
  rtx array, orig_ndx, r, addr, size, va_size;
  rtx lab_false, lab_over, lab_false2;

  f_stk = TYPE_FIELDS (va_list_type_node);
  f_reg = TREE_CHAIN (f_stk);
  f_ndx = TREE_CHAIN (f_reg);

  stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
  reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
  ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);

  type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));

  /* va_size = sizeof (TYPE) rounded up to a whole number of words.  */
  va_size = gen_reg_rtx (SImode);
  tmp = fold (build (MULT_EXPR, sizetype,
		     fold (build (TRUNC_DIV_EXPR, sizetype,
				  fold (build (PLUS_EXPR, sizetype,
					       type_size,
					       size_int (UNITS_PER_WORD - 1))),
				  size_int (UNITS_PER_WORD))),
		     size_int (UNITS_PER_WORD)));
  r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
  if (r != va_size)
    emit_move_insn (va_size, r);

  /* First align __va_ndx to a double word boundary if necessary for this arg:

     if (__alignof__ (TYPE) > 4)
       (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
  */

  if (TYPE_ALIGN (type) > BITS_PER_WORD)
    {
      tmp = build (PLUS_EXPR, integer_type_node, ndx,
		   build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
      tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
		   build_int_2 (-2 * UNITS_PER_WORD, -1));
      tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
      TREE_SIDE_EFFECTS (tmp) = 1;
      expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Increment __va_ndx to point past the argument:

     orig_ndx = (AP).__va_ndx;
     (AP).__va_ndx += __va_size (TYPE);
  */

  orig_ndx = gen_reg_rtx (SImode);
  r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
  if (r != orig_ndx)
    emit_move_insn (orig_ndx, r);

  tmp = build (PLUS_EXPR, integer_type_node, ndx,
	       make_tree (intSI_type_node, va_size));
  tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
  TREE_SIDE_EFFECTS (tmp) = 1;
  expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Check if the argument is in registers:

     if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
	 && !MUST_PASS_IN_STACK (type))
       __array = (AP).__va_reg;
  */

  array = gen_reg_rtx (Pmode);

  lab_over = NULL_RTX;
  if (!MUST_PASS_IN_STACK (VOIDmode, type))
    {
      lab_false = gen_label_rtx ();
      lab_over = gen_label_rtx ();

      emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
					    EXPAND_NORMAL),
			       GEN_INT (MAX_ARGS_IN_REGISTERS
					* UNITS_PER_WORD),
			       GT, const1_rtx, SImode, 0, lab_false);

      r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
      if (r != array)
	emit_move_insn (array, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
      emit_label (lab_false);
    }

  /* ...otherwise, the argument is on the stack (never split between
     registers and the stack -- change __va_ndx if necessary):

     else
       {
	 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
	   (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
	 __array = (AP).__va_stk;
       }
  */

  lab_false2 = gen_label_rtx ();
  emit_cmp_and_jump_insns (orig_ndx,
			   GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
			   GE, const1_rtx, SImode, 0, lab_false2);

  tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
	       build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
  tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
  TREE_SIDE_EFFECTS (tmp) = 1;
  expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_false2);

  r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
  if (r != array)
    emit_move_insn (array, r);

  if (lab_over != NULL_RTX)
    emit_label (lab_over);

  /* Given the base array pointer (__array) and index to the subsequent
     argument (__va_ndx), find the address:

     __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
				? sizeof (TYPE)
				: __va_size (TYPE))

     The results are endian-dependent because values smaller than one word
     are aligned differently.  */

  size = gen_reg_rtx (SImode);
  emit_move_insn (size, va_size);

  if (BYTES_BIG_ENDIAN)
    {
      rtx lab_use_va_size = gen_label_rtx ();

      emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
					    EXPAND_NORMAL),
			       GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
			       GE, const1_rtx, SImode, 0, lab_use_va_size);

      /* Sub-word value on big-endian: back up by the exact type size.  */
      r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
      if (r != size)
	emit_move_insn (size, r);

      emit_label (lab_use_va_size);
    }

  addr_tree = build (PLUS_EXPR, ptr_type_node,
		     make_tree (ptr_type_node, array),
		     ndx);
  addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
		     make_tree (intSI_type_node, size));
  addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
  addr = copy_to_reg (addr);
  return addr;
}
2588 enum reg_class
2589 xtensa_preferred_reload_class (x, class)
2590 rtx x;
2591 enum reg_class class;
2593 if (CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2594 return NO_REGS;
2596 /* Don't use sp for reloads! */
2597 if (class == AR_REGS)
2598 return GR_REGS;
2600 return class;
2604 enum reg_class
2605 xtensa_secondary_reload_class (class, mode, x, isoutput)
2606 enum reg_class class;
2607 enum machine_mode mode ATTRIBUTE_UNUSED;
2608 rtx x;
2609 int isoutput;
2611 int regno;
2613 if (GET_CODE (x) == SIGN_EXTEND)
2614 x = XEXP (x, 0);
2615 regno = xt_true_regnum (x);
2617 if (!isoutput)
2619 if (class == FP_REGS && constantpool_mem_p (x))
2620 return GR_REGS;
2623 if (ACC_REG_P (regno))
2624 return (class == GR_REGS ? NO_REGS : GR_REGS);
2625 if (class == ACC_REG)
2626 return (GP_REG_P (regno) ? NO_REGS : GR_REGS);
2628 return NO_REGS;
2632 void
2633 order_regs_for_local_alloc ()
2635 if (!leaf_function_p ())
2637 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2638 FIRST_PSEUDO_REGISTER * sizeof (int));
2640 else
2642 int i, num_arg_regs;
2643 int nxt = 0;
2645 /* use the AR registers in increasing order (skipping a0 and a1)
2646 but save the incoming argument registers for a last resort */
2647 num_arg_regs = current_function_args_info.arg_words;
2648 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2649 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2650 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2651 reg_alloc_order[nxt++] = i + num_arg_regs;
2652 for (i = 0; i < num_arg_regs; i++)
2653 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2655 /* list the FP registers in order for now */
2656 for (i = 0; i < 16; i++)
2657 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2659 /* GCC requires that we list *all* the registers.... */
2660 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2661 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2662 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2663 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2665 /* list the coprocessor registers in order */
2666 for (i = 0; i < BR_REG_NUM; i++)
2667 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2669 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2674 /* A customized version of reg_overlap_mentioned_p that only looks for
2675 references to a7 (as opposed to hard_frame_pointer_rtx). */
2678 a7_overlap_mentioned_p (x)
2679 rtx x;
2681 int i, j;
2682 unsigned int x_regno;
2683 const char *fmt;
2685 if (GET_CODE (x) == REG)
2687 x_regno = REGNO (x);
2688 return (x != hard_frame_pointer_rtx
2689 && x_regno < A7_REG + 1
2690 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2693 if (GET_CODE (x) == SUBREG
2694 && GET_CODE (SUBREG_REG (x)) == REG
2695 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2697 x_regno = subreg_regno (x);
2698 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2699 && x_regno < A7_REG + 1
2700 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2703 /* X does not match, so try its subexpressions. */
2704 fmt = GET_RTX_FORMAT (GET_CODE (x));
2705 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2707 if (fmt[i] == 'e')
2709 if (a7_overlap_mentioned_p (XEXP (x, i)))
2710 return 1;
2712 else if (fmt[i] == 'E')
2714 for (j = XVECLEN (x, i) - 1; j >=0; j--)
2715 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2716 return 1;
2720 return 0;
2723 /* The literal pool stays with the function. */
2725 static void
2726 xtensa_select_rtx_section (mode, x, align)
2727 enum machine_mode mode ATTRIBUTE_UNUSED;
2728 rtx x ATTRIBUTE_UNUSED;
2729 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
2731 function_section (current_function_decl);
2734 /* If we are referencing a function that is static, make the SYMBOL_REF
2735 special so that we can generate direct calls to it even with -fpic. */
2737 static void
2738 xtensa_encode_section_info (decl, first)
2739 tree decl;
2740 int first ATTRIBUTE_UNUSED;
2742 if (TREE_CODE (decl) == FUNCTION_DECL && ! TREE_PUBLIC (decl))
2743 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
2746 #include "gt-xtensa.h"