xtensa.h (REG_CLASS_NAMES, [...]): Add new RL_REGS register class.
[official-gcc.git] / gcc / config / xtensa / xtensa.c
blob ae7c994d9a857161c0c86c834c419cd18999d419
1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright 2001,2002 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "regs.h"
26 #include "machmode.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
35 #include "recog.h"
36 #include "output.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "flags.h"
40 #include "reload.h"
41 #include "tm_p.h"
42 #include "function.h"
43 #include "toplev.h"
44 #include "optabs.h"
45 #include "output.h"
46 #include "libfuncs.h"
47 #include "ggc.h"
48 #include "target.h"
49 #include "target-def.h"
50 #include "langhooks.h"
52 /* Enumeration for all of the relational tests, so that we can build
53 arrays indexed by the test type, and not worry about the order
54 of EQ, NE, etc. */
56 enum internal_test {
57 ITEST_EQ,
58 ITEST_NE,
59 ITEST_GT,
60 ITEST_GE,
61 ITEST_LT,
62 ITEST_LE,
63 ITEST_GTU,
64 ITEST_GEU,
65 ITEST_LTU,
66 ITEST_LEU,
67 ITEST_MAX
70 /* Cached operands, and operator to compare for use in set/branch on
71 condition codes. */
72 rtx branch_cmp[2];
74 /* what type of branch to use */
75 enum cmp_type branch_type;
77 /* Array giving truth value on whether or not a given hard register
78 can support a given mode. */
79 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
81 /* Current frame size calculated by compute_frame_size. */
82 unsigned xtensa_current_frame_size;
84 /* Tables of ld/st opcode names for block moves */
85 const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
86 const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
87 #define LARGEST_MOVE_RATIO 15
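/* Upper bound on the number of load/store pieces emitted for an inline
   block move; see the BLKmode case of xtensa_mem_offset and
   xtensa_expand_block_move below. */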
89 /* Define the structure for the machine field in struct function. */
90 struct machine_function GTY(())
92 int accesses_prev_frame;
95 /* Vector, indexed by hard register number, which contains 1 for a
96 register that is allowable in a candidate for leaf function
97 treatment. */
99 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
101 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
102 1, 1, 1,
103 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
107 /* Map hard register number to register class */
108 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
110 RL_REGS, SP_REG, RL_REGS, RL_REGS,
111 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
112 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
113 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
114 AR_REGS, AR_REGS, BR_REGS,
115 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
116 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
117 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
118 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
119 ACC_REG,
122 /* Map register constraint character to register class. */
123 enum reg_class xtensa_char_to_class[256] =
125 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
126 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
191 static int b4const_or_zero PARAMS ((int));
192 static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
193 static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
194 static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
195 static rtx gen_conditional_move PARAMS ((rtx));
196 static rtx fixup_subreg_mem PARAMS ((rtx x));
197 static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
198 static struct machine_function * xtensa_init_machine_status PARAMS ((void));
199 static void printx PARAMS ((FILE *, signed int));
200 static unsigned int xtensa_multibss_section_type_flags
201 PARAMS ((tree, const char *, int));
202 static void xtensa_select_rtx_section
203 PARAMS ((enum machine_mode, rtx, unsigned HOST_WIDE_INT));
204 static void xtensa_encode_section_info PARAMS ((tree, int));
206 static rtx frame_size_const;
207 static int current_function_arg_words;
208 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
209 REG_ALLOC_ORDER;
211 /* This macro generates the assembly code for function entry.
212 FILE is a stdio stream to output the code to.
213 SIZE is an int: how many units of temporary storage to allocate.
214 Refer to the array 'regs_ever_live' to determine which registers
215 to save; 'regs_ever_live[I]' is nonzero if register number I
216 is ever used in the function. This macro is responsible for
217 knowing which registers should not be saved even if used. */
219 #undef TARGET_ASM_FUNCTION_PROLOGUE
220 #define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
222 /* This macro generates the assembly code for function exit,
223 on machines that need it. If FUNCTION_EPILOGUE is not defined
224 then individual return instructions are generated for each
225 return statement. Args are same as for FUNCTION_PROLOGUE. */
227 #undef TARGET_ASM_FUNCTION_EPILOGUE
228 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
230 /* These hooks specify assembly directives for creating certain kinds
231 of integer object. */
233 #undef TARGET_ASM_ALIGNED_SI_OP
234 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
236 #undef TARGET_ASM_SELECT_RTX_SECTION
237 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
238 #undef TARGET_ENCODE_SECTION_INFO
239 #define TARGET_ENCODE_SECTION_INFO xtensa_encode_section_info
241 struct gcc_target targetm = TARGET_INITIALIZER;
245 * Functions to test Xtensa immediate operand validity.
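/* Accept the unsigned branch-immediate constants: the same table as
   xtensa_b4const below except that -1 and 1 are replaced by 32768 and
   65536 (presumably to match the unsigned conditional branches). */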
249 xtensa_b4constu (v)
250 int v;
252 switch (v)
254 case 32768:
255 case 65536:
256 case 2:
257 case 3:
258 case 4:
259 case 5:
260 case 6:
261 case 7:
262 case 8:
263 case 10:
264 case 12:
265 case 16:
266 case 32:
267 case 64:
268 case 128:
269 case 256:
270 return 1;
272 return 0;
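/* Nonzero if V is a multiple of 256 whose scaled value fits in a signed
   8-bit field, i.e., V == N * 256 with -128 <= N <= 127. */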
276 xtensa_simm8x256 (v)
277 int v;
279 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
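/* Nonzero if V is -1 or 1..15, the values representable in the 4-bit
   add-immediate field (note that zero is excluded). */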
283 xtensa_ai4const (v)
284 int v;
286 return (v == -1 || (v >= 1 && v <= 15));
290 xtensa_simm7 (v)
291 int v;
293 return v >= -32 && v <= 95;
297 xtensa_b4const (v)
298 int v;
300 switch (v)
302 case -1:
303 case 1:
304 case 2:
305 case 3:
306 case 4:
307 case 5:
308 case 6:
309 case 7:
310 case 8:
311 case 10:
312 case 12:
313 case 16:
314 case 32:
315 case 64:
316 case 128:
317 case 256:
318 return 1;
320 return 0;
324 xtensa_simm8 (v)
325 int v;
327 return v >= -128 && v <= 127;
331 xtensa_tp7 (v)
332 int v;
334 return (v >= 7 && v <= 22);
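/* Nonzero if V is a multiple of 4 in the range 0..60, i.e., it fits a
   4-bit offset field scaled by 4 (as used by the narrow load/store
   encodings). */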
338 xtensa_lsi4x4 (v)
339 int v;
341 return (v & 3) == 0 && (v >= 0 && v <= 60);
345 xtensa_simm12b (v)
346 int v;
348 return v >= -2048 && v <= 2047;
352 xtensa_uimm8 (v)
353 int v;
355 return v >= 0 && v <= 255;
359 xtensa_uimm8x2 (v)
360 int v;
362 return (v & 1) == 0 && (v >= 0 && v <= 510);
366 xtensa_uimm8x4 (v)
367 int v;
369 return (v & 3) == 0 && (v >= 0 && v <= 1020);
373 /* This is just like the standard true_regnum() function except that it
374 works even when reg_renumber is not initialized. */
377 xt_true_regnum (x)
378 rtx x;
380 if (GET_CODE (x) == REG)
382 if (reg_renumber
383 && REGNO (x) >= FIRST_PSEUDO_REGISTER
384 && reg_renumber[REGNO (x)] >= 0)
385 return reg_renumber[REGNO (x)];
386 return REGNO (x);
388 if (GET_CODE (x) == SUBREG)
390 int base = xt_true_regnum (SUBREG_REG (x));
391 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
392 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
393 GET_MODE (SUBREG_REG (x)),
394 SUBREG_BYTE (x), GET_MODE (x));
396 return -1;
401 add_operand (op, mode)
402 rtx op;
403 enum machine_mode mode;
405 if (GET_CODE (op) == CONST_INT)
406 return (xtensa_simm8 (INTVAL (op)) ||
407 xtensa_simm8x256 (INTVAL (op)));
409 return register_operand (op, mode);
414 arith_operand (op, mode)
415 rtx op;
416 enum machine_mode mode;
418 if (GET_CODE (op) == CONST_INT)
419 return xtensa_simm8 (INTVAL (op));
421 return register_operand (op, mode);
426 nonimmed_operand (op, mode)
427 rtx op;
428 enum machine_mode mode;
430 /* We cannot use the standard nonimmediate_operand() predicate because
431 it includes constant pool memory operands. */
433 if (memory_operand (op, mode))
434 return !constantpool_address_p (XEXP (op, 0));
436 return register_operand (op, mode);
441 mem_operand (op, mode)
442 rtx op;
443 enum machine_mode mode;
445 /* We cannot use the standard memory_operand() predicate because
446 it includes constant pool memory operands. */
448 if (memory_operand (op, mode))
449 return !constantpool_address_p (XEXP (op, 0));
451 return FALSE;
456 xtensa_valid_move (mode, operands)
457 enum machine_mode mode;
458 rtx *operands;
460 /* Either the destination or source must be a register, and the
461 MAC16 accumulator doesn't count. */
463 if (register_operand (operands[0], mode))
465 int dst_regnum = xt_true_regnum (operands[0]);
467 /* The stack pointer can only be assigned with a MOVSP opcode. */
468 if (dst_regnum == STACK_POINTER_REGNUM)
469 return (mode == SImode
470 && register_operand (operands[1], mode)
471 && !ACC_REG_P (xt_true_regnum (operands[1])));
473 if (!ACC_REG_P (dst_regnum))
474 return true;
476 if (register_operand (operands[1], mode))
478 int src_regnum = xt_true_regnum (operands[1]);
479 if (!ACC_REG_P (src_regnum))
480 return true;
482 return FALSE;
487 mask_operand (op, mode)
488 rtx op;
489 enum machine_mode mode;
491 if (GET_CODE (op) == CONST_INT)
492 return xtensa_mask_immediate (INTVAL (op));
494 return register_operand (op, mode);
499 extui_fldsz_operand (op, mode)
500 rtx op;
501 enum machine_mode mode ATTRIBUTE_UNUSED;
503 return ((GET_CODE (op) == CONST_INT)
504 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
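/* Operand predicate for the sign-extend patterns: with the SEXT option
   any register or (non-constant-pool) memory operand is allowed; without
   it only memory is accepted, apparently because sign extension is then
   done as part of the load itself. */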
509 sext_operand (op, mode)
510 rtx op;
511 enum machine_mode mode;
513 if (TARGET_SEXT)
514 return nonimmed_operand (op, mode);
515 return mem_operand (op, mode);
520 sext_fldsz_operand (op, mode)
521 rtx op;
522 enum machine_mode mode ATTRIBUTE_UNUSED;
524 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
529 lsbitnum_operand (op, mode)
530 rtx op;
531 enum machine_mode mode ATTRIBUTE_UNUSED;
533 if (GET_CODE (op) == CONST_INT)
535 return (BITS_BIG_ENDIAN
536 ? (INTVAL (op) == BITS_PER_WORD-1)
537 : (INTVAL (op) == 0));
539 return FALSE;
543 static int
544 b4const_or_zero (v)
545 int v;
547 if (v == 0)
548 return TRUE;
549 return xtensa_b4const (v);
554 branch_operand (op, mode)
555 rtx op;
556 enum machine_mode mode;
558 if (GET_CODE (op) == CONST_INT)
559 return b4const_or_zero (INTVAL (op));
561 return register_operand (op, mode);
566 ubranch_operand (op, mode)
567 rtx op;
568 enum machine_mode mode;
570 if (GET_CODE (op) == CONST_INT)
571 return xtensa_b4constu (INTVAL (op));
573 return register_operand (op, mode);
578 call_insn_operand (op, mode)
579 rtx op;
580 enum machine_mode mode ATTRIBUTE_UNUSED;
582 if ((GET_CODE (op) == REG)
583 && (op != arg_pointer_rtx)
584 && ((REGNO (op) < FRAME_POINTER_REGNUM)
585 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
586 return TRUE;
588 if (CONSTANT_ADDRESS_P (op))
590 /* Direct calls only allowed to static functions with PIC. */
591 return (!flag_pic || (GET_CODE (op) == SYMBOL_REF
592 && SYMBOL_REF_FLAG (op)));
595 return FALSE;
600 move_operand (op, mode)
601 rtx op;
602 enum machine_mode mode;
604 if (register_operand (op, mode))
605 return TRUE;
607 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
608 result in 0/1. */
609 if (GET_CODE (op) == CONSTANT_P_RTX)
610 return TRUE;
612 if (GET_CODE (op) == CONST_INT)
613 return xtensa_simm12b (INTVAL (op));
615 if (GET_CODE (op) == MEM)
616 return memory_address_p (mode, XEXP (op, 0));
618 return FALSE;
623 smalloffset_mem_p (op)
624 rtx op;
626 if (GET_CODE (op) == MEM)
628 rtx addr = XEXP (op, 0);
629 if (GET_CODE (addr) == REG)
630 return REG_OK_FOR_BASE_P (addr);
631 if (GET_CODE (addr) == PLUS)
633 rtx offset = XEXP (addr, 0);
634 if (GET_CODE (offset) != CONST_INT)
635 offset = XEXP (addr, 1);
636 if (GET_CODE (offset) != CONST_INT)
637 return FALSE;
638 return xtensa_lsi4x4 (INTVAL (offset));
641 return FALSE;
646 smalloffset_double_mem_p (op)
647 rtx op;
649 if (!smalloffset_mem_p (op))
650 return FALSE;
651 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
656 constantpool_address_p (addr)
657 rtx addr;
659 rtx sym = addr;
661 if (GET_CODE (addr) == CONST)
663 rtx offset;
665 /* only handle (PLUS (SYM, OFFSET)) form */
666 addr = XEXP (addr, 0);
667 if (GET_CODE (addr) != PLUS)
668 return FALSE;
670 /* make sure the address is word aligned */
671 offset = XEXP (addr, 1);
672 if ((GET_CODE (offset) != CONST_INT)
673 || ((INTVAL (offset) & 3) != 0))
674 return FALSE;
676 sym = XEXP (addr, 0);
679 if ((GET_CODE (sym) == SYMBOL_REF)
680 && CONSTANT_POOL_ADDRESS_P (sym))
681 return TRUE;
682 return FALSE;
687 constantpool_mem_p (op)
688 rtx op;
690 if (GET_CODE (op) == MEM)
691 return constantpool_address_p (XEXP (op, 0));
692 return FALSE;
697 non_const_move_operand (op, mode)
698 rtx op;
699 enum machine_mode mode;
701 if (register_operand (op, mode))
702 return 1;
703 if (GET_CODE (op) == SUBREG)
704 op = SUBREG_REG (op);
705 if (GET_CODE (op) == MEM)
706 return memory_address_p (mode, XEXP (op, 0));
707 return FALSE;
711 /* Accept the floating point constant 1 in the appropriate mode. */
714 const_float_1_operand (op, mode)
715 rtx op;
716 enum machine_mode mode;
718 REAL_VALUE_TYPE d;
719 static REAL_VALUE_TYPE onedf;
720 static REAL_VALUE_TYPE onesf;
721 static int one_initialized;
723 if ((GET_CODE (op) != CONST_DOUBLE)
724 || (mode != GET_MODE (op))
725 || (mode != DFmode && mode != SFmode))
726 return FALSE;
728 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
730 if (! one_initialized)
732 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
733 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
734 one_initialized = TRUE;
737 if (mode == DFmode)
738 return REAL_VALUES_EQUAL (d, onedf);
739 else
740 return REAL_VALUES_EQUAL (d, onesf);
745 fpmem_offset_operand (op, mode)
746 rtx op;
747 enum machine_mode mode ATTRIBUTE_UNUSED;
749 if (GET_CODE (op) == CONST_INT)
750 return xtensa_mem_offset (INTVAL (op), SFmode);
751 return 0;
755 void
756 xtensa_extend_reg (dst, src)
757 rtx dst;
758 rtx src;
760 rtx temp = gen_reg_rtx (SImode);
761 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
763 /* generate paradoxical subregs as needed so that the modes match */
764 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
765 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
767 emit_insn (gen_ashlsi3 (temp, src, shift));
768 emit_insn (gen_ashrsi3 (dst, temp, shift));
772 void
773 xtensa_load_constant (dst, src)
774 rtx dst;
775 rtx src;
777 enum machine_mode mode = GET_MODE (dst);
778 src = force_const_mem (SImode, src);
780 /* PC-relative loads are always SImode so we have to add a SUBREG if that
781 is not the desired mode */
783 if (mode != SImode)
785 if (register_operand (dst, mode))
786 dst = simplify_gen_subreg (SImode, dst, mode, 0);
787 else
789 src = force_reg (SImode, src);
790 src = gen_lowpart_SUBREG (mode, src);
794 emit_move_insn (dst, src);
799 branch_operator (x, mode)
800 rtx x;
801 enum machine_mode mode;
803 if (GET_MODE (x) != mode)
804 return FALSE;
806 switch (GET_CODE (x))
808 case EQ:
809 case NE:
810 case LT:
811 case GE:
812 return TRUE;
813 default:
814 break;
816 return FALSE;
821 ubranch_operator (x, mode)
822 rtx x;
823 enum machine_mode mode;
825 if (GET_MODE (x) != mode)
826 return FALSE;
828 switch (GET_CODE (x))
830 case LTU:
831 case GEU:
832 return TRUE;
833 default:
834 break;
836 return FALSE;
841 boolean_operator (x, mode)
842 rtx x;
843 enum machine_mode mode;
845 if (GET_MODE (x) != mode)
846 return FALSE;
848 switch (GET_CODE (x))
850 case EQ:
851 case NE:
852 return TRUE;
853 default:
854 break;
856 return FALSE;
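/* Nonzero if V is a mask of the form (1 << N) - 1 with 1 <= N <= 16,
   i.e., between one and sixteen consecutive low-order one bits and
   nothing above them. */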
861 xtensa_mask_immediate (v)
862 int v;
864 #define MAX_MASK_SIZE 16
865 int mask_size;
867 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
869 if ((v & 1) == 0)
870 return FALSE;
871 v = v >> 1;
872 if (v == 0)
873 return TRUE;
876 return FALSE;
881 xtensa_mem_offset (v, mode)
882 unsigned v;
883 enum machine_mode mode;
885 switch (mode)
887 case BLKmode:
888 /* Handle the worst case for block moves. See xtensa_expand_block_move
889 where we emit an optimized block move operation if the block can be
890 moved in < "move_ratio" pieces. The worst case is when the block is
891 aligned but has a size of (3 mod 4) (does this happen?) so that the
892 last piece requires a byte load/store. */
893 return (xtensa_uimm8 (v) &&
894 xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
896 case QImode:
897 return xtensa_uimm8 (v);
899 case HImode:
900 return xtensa_uimm8x2 (v);
902 case DFmode:
903 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
905 default:
906 break;
909 return xtensa_uimm8x4 (v);
913 /* Make normal rtx_code into something we can index from an array */
915 static enum internal_test
916 map_test_to_internal_test (test_code)
917 enum rtx_code test_code;
919 enum internal_test test = ITEST_MAX;
921 switch (test_code)
923 default: break;
924 case EQ: test = ITEST_EQ; break;
925 case NE: test = ITEST_NE; break;
926 case GT: test = ITEST_GT; break;
927 case GE: test = ITEST_GE; break;
928 case LT: test = ITEST_LT; break;
929 case LE: test = ITEST_LE; break;
930 case GTU: test = ITEST_GTU; break;
931 case GEU: test = ITEST_GEU; break;
932 case LTU: test = ITEST_LTU; break;
933 case LEU: test = ITEST_LEU; break;
936 return test;
940 /* Generate the code to compare two integer values. The return value is
941 the comparison expression. */
943 static rtx
944 gen_int_relational (test_code, cmp0, cmp1, p_invert)
945 enum rtx_code test_code; /* relational test (EQ, etc) */
946 rtx cmp0; /* first operand to compare */
947 rtx cmp1; /* second operand to compare */
948 int *p_invert; /* whether branch needs to reverse its test */
950 struct cmp_info {
951 enum rtx_code test_code; /* test code to use in insn */
952 int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
953 int const_add; /* constant to add (convert LE -> LT) */
954 int reverse_regs; /* reverse registers in test */
955 int invert_const; /* != 0 if invert value if cmp1 is constant */
956 int invert_reg; /* != 0 if invert value if cmp1 is register */
957 int unsignedp; /* != 0 for unsigned comparisons. */
960 static struct cmp_info info[ (int)ITEST_MAX ] = {
962 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
963 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
965 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
966 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
967 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
968 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
970 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
971 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
972 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
973 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
976 enum internal_test test;
977 enum machine_mode mode;
978 struct cmp_info *p_info;
980 test = map_test_to_internal_test (test_code);
981 if (test == ITEST_MAX)
982 abort ();
984 p_info = &info[ (int)test ];
986 mode = GET_MODE (cmp0);
987 if (mode == VOIDmode)
988 mode = GET_MODE (cmp1);
990 /* Make sure we can handle any constants given to us. */
991 if (GET_CODE (cmp1) == CONST_INT)
993 HOST_WIDE_INT value = INTVAL (cmp1);
994 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
996 /* if the immediate overflows or does not fit in the immediate field,
997 spill it to a register */
999 if ((p_info->unsignedp ?
1000 (uvalue + p_info->const_add > uvalue) :
1001 (value + p_info->const_add > value)) != (p_info->const_add > 0))
1003 cmp1 = force_reg (mode, cmp1);
1005 else if (!(p_info->const_range_p) (value + p_info->const_add))
1007 cmp1 = force_reg (mode, cmp1);
1010 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
1012 cmp1 = force_reg (mode, cmp1);
1015 /* See if we need to invert the result. */
1016 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
1017 ? p_info->invert_const
1018 : p_info->invert_reg);
1020 /* Comparison to constants, may involve adding 1 to change a LT into LE.
1021 Comparison between two registers, may involve switching operands. */
1022 if (GET_CODE (cmp1) == CONST_INT)
1024 if (p_info->const_add != 0)
1025 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
1028 else if (p_info->reverse_regs)
1030 rtx temp = cmp0;
1031 cmp0 = cmp1;
1032 cmp1 = temp;
1035 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
1039 /* Generate the code to compare two float values. The return value is
1040 the comparison expression. */
1042 static rtx
1043 gen_float_relational (test_code, cmp0, cmp1)
1044 enum rtx_code test_code; /* relational test (EQ, etc) */
1045 rtx cmp0; /* first operand to compare */
1046 rtx cmp1; /* second operand to compare */
1048 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
1049 rtx brtmp;
1050 int reverse_regs, invert;
1052 switch (test_code)
1054 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1055 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1056 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1057 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1058 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1059 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1060 default:
1061 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1062 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
1065 if (reverse_regs)
1067 rtx temp = cmp0;
1068 cmp0 = cmp1;
1069 cmp1 = temp;
1072 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1073 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1075 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
1079 void
1080 xtensa_expand_conditional_branch (operands, test_code)
1081 rtx *operands;
1082 enum rtx_code test_code;
1084 enum cmp_type type = branch_type;
1085 rtx cmp0 = branch_cmp[0];
1086 rtx cmp1 = branch_cmp[1];
1087 rtx cmp;
1088 int invert;
1089 rtx label1, label2;
1091 switch (type)
1093 case CMP_DF:
1094 default:
1095 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1097 case CMP_SI:
1098 invert = FALSE;
1099 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1100 break;
1102 case CMP_SF:
1103 if (!TARGET_HARD_FLOAT)
1104 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1105 invert = FALSE;
1106 cmp = gen_float_relational (test_code, cmp0, cmp1);
1107 break;
1110 /* Generate the branch. */
1112 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1113 label2 = pc_rtx;
1115 if (invert)
1117 label2 = label1;
1118 label1 = pc_rtx;
1121 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1122 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
1123 label1,
1124 label2)));
1128 static rtx
1129 gen_conditional_move (cmp)
1130 rtx cmp;
1132 enum rtx_code code = GET_CODE (cmp);
1133 rtx op0 = branch_cmp[0];
1134 rtx op1 = branch_cmp[1];
1136 if (branch_type == CMP_SI)
1138 /* Jump optimization calls get_condition() which canonicalizes
1139 comparisons like (GE x <const>) to (GT x <const-1>).
1140 Transform those comparisons back to GE, since that is the
1141 comparison supported in Xtensa. We shouldn't have to
1142 transform <LE x const> comparisons, because neither
1143 xtensa_expand_conditional_branch() nor get_condition() will
1144 produce them. */
1146 if ((code == GT) && (op1 == constm1_rtx))
1148 code = GE;
1149 op1 = const0_rtx;
1151 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1153 if (boolean_operator (cmp, VOIDmode))
1155 /* swap the operands to make const0 second */
1156 if (op0 == const0_rtx)
1158 op0 = op1;
1159 op1 = const0_rtx;
1162 /* if not comparing against zero, emit a comparison (subtract) */
1163 if (op1 != const0_rtx)
1165 op0 = expand_binop (SImode, sub_optab, op0, op1,
1166 0, 0, OPTAB_LIB_WIDEN);
1167 op1 = const0_rtx;
1170 else if (branch_operator (cmp, VOIDmode))
1172 /* swap the operands to make const0 second */
1173 if (op0 == const0_rtx)
1175 op0 = op1;
1176 op1 = const0_rtx;
1178 switch (code)
1180 case LT: code = GE; break;
1181 case GE: code = LT; break;
1182 default: abort ();
1186 if (op1 != const0_rtx)
1187 return 0;
1189 else
1190 return 0;
1192 return gen_rtx (code, VOIDmode, op0, op1);
1195 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1196 return gen_float_relational (code, op0, op1);
1198 return 0;
1203 xtensa_expand_conditional_move (operands, isflt)
1204 rtx *operands;
1205 int isflt;
1207 rtx cmp;
1208 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1210 if (!(cmp = gen_conditional_move (operands[1])))
1211 return 0;
1213 if (isflt)
1214 gen_fn = (branch_type == CMP_SI
1215 ? gen_movsfcc_internal0
1216 : gen_movsfcc_internal1);
1217 else
1218 gen_fn = (branch_type == CMP_SI
1219 ? gen_movsicc_internal0
1220 : gen_movsicc_internal1);
1222 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1223 operands[2], operands[3], cmp));
1224 return 1;
1229 xtensa_expand_scc (operands)
1230 rtx *operands;
1232 rtx dest = operands[0];
1233 rtx cmp = operands[1];
1234 rtx one_tmp, zero_tmp;
1235 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1237 if (!(cmp = gen_conditional_move (cmp)))
1238 return 0;
1240 one_tmp = gen_reg_rtx (SImode);
1241 zero_tmp = gen_reg_rtx (SImode);
1242 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1243 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1245 gen_fn = (branch_type == CMP_SI
1246 ? gen_movsicc_internal0
1247 : gen_movsicc_internal1);
1248 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1249 return 1;
1253 /* Emit insns to move operands[1] into operands[0].
1255 Return 1 if we have written out everything that needs to be done to
1256 do the move. Otherwise, return 0 and the caller will emit the move
1257 normally. */
1260 xtensa_emit_move_sequence (operands, mode)
1261 rtx *operands;
1262 enum machine_mode mode;
1264 if (CONSTANT_P (operands[1])
1265 && GET_CODE (operands[1]) != CONSTANT_P_RTX
1266 && (GET_CODE (operands[1]) != CONST_INT
1267 || !xtensa_simm12b (INTVAL (operands[1]))))
1269 xtensa_load_constant (operands[0], operands[1]);
1270 return 1;
1273 if (!(reload_in_progress | reload_completed))
1275 if (!xtensa_valid_move (mode, operands))
1276 operands[1] = force_reg (mode, operands[1]);
1278 /* Check if this move is copying an incoming argument in a7. If
1279 so, emit the move, followed by the special "set_frame_ptr"
1280 unspec_volatile insn, at the very beginning of the function.
1281 This is necessary because the register allocator will ignore
1282 conflicts with a7 and may assign some other pseudo to a7. If
1283 that pseudo was assigned prior to this move, it would clobber
1284 the incoming argument in a7. By copying the argument out of
1285 a7 as the very first thing, and then immediately following
1286 that with an unspec_volatile to keep the scheduler away, we
1287 should avoid any problems. */
1289 if (a7_overlap_mentioned_p (operands[1]))
1291 rtx mov;
1292 switch (mode)
1294 case SImode:
1295 mov = gen_movsi_internal (operands[0], operands[1]);
1296 break;
1297 case HImode:
1298 mov = gen_movhi_internal (operands[0], operands[1]);
1299 break;
1300 case QImode:
1301 mov = gen_movqi_internal (operands[0], operands[1]);
1302 break;
1303 default:
1304 abort ();
1307 /* Insert the instructions before any other argument copies.
1308 (The set_frame_ptr insn comes _after_ the move, so push it
1309 out first.) */
1310 push_topmost_sequence ();
1311 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1312 emit_insn_after (mov, get_insns ());
1313 pop_topmost_sequence ();
1315 return 1;
1319 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1320 instruction won't be recognized after reload. So we remove the
1321 subreg and adjust mem accordingly. */
1322 if (reload_in_progress)
1324 operands[0] = fixup_subreg_mem (operands[0]);
1325 operands[1] = fixup_subreg_mem (operands[1]);
1327 return 0;
1330 static rtx
1331 fixup_subreg_mem (x)
1332 rtx x;
1334 if (GET_CODE (x) == SUBREG
1335 && GET_CODE (SUBREG_REG (x)) == REG
1336 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1338 rtx temp =
1339 gen_rtx_SUBREG (GET_MODE (x),
1340 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1341 SUBREG_BYTE (x));
1342 x = alter_subreg (&temp);
1344 return x;
1348 /* Try to expand a block move operation to an RTL block move instruction.
1349 If not optimizing or if the block size is not a constant or if the
1350 block is small, the expansion fails and GCC falls back to calling
1351 memcpy().
1353 operands[0] is the destination
1354 operands[1] is the source
1355 operands[2] is the length
1356 operands[3] is the alignment */
1359 xtensa_expand_block_move (operands)
1360 rtx *operands;
1362 rtx dest = operands[0];
1363 rtx src = operands[1];
1364 int bytes = INTVAL (operands[2]);
1365 int align = XINT (operands[3], 0);
1366 int num_pieces, move_ratio;
1368 /* If this is not a fixed size move, just call memcpy */
1369 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1370 return 0;
1372 /* Anything to move? */
1373 if (bytes <= 0)
1374 return 1;
1376 if (align > MOVE_MAX)
1377 align = MOVE_MAX;
1379 /* decide whether to expand inline based on the optimization level */
1380 move_ratio = 4;
1381 if (optimize > 2)
1382 move_ratio = LARGEST_MOVE_RATIO;
1383 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1384 if (num_pieces >= move_ratio)
1385 return 0;
1387 /* make sure the memory addresses are valid */
1388 operands[0] = validize_mem (dest);
1389 operands[1] = validize_mem (src);
1391 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1392 operands[2], operands[3]));
1393 return 1;
1397 /* Emit a sequence of instructions to implement a block move, trying
1398 to hide load delay slots as much as possible. Load N values into
1399 temporary registers, store those N values, and repeat until the
1400 complete block has been moved. N=delay_slots+1 */
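/* For example, with delay_slots == 2, each iteration loads three values
   into tmpregs[0..2] and then stores all three, so each load's delay
   slot is covered by the loads that follow it. */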
1402 struct meminsnbuf {
1403 char template[30];
1404 rtx operands[2];
1407 void
1408 xtensa_emit_block_move (operands, tmpregs, delay_slots)
1409 rtx *operands;
1410 rtx *tmpregs;
1411 int delay_slots;
1413 rtx dest = operands[0];
1414 rtx src = operands[1];
1415 int bytes = INTVAL (operands[2]);
1416 int align = XINT (operands[3], 0);
1417 rtx from_addr = XEXP (src, 0);
1418 rtx to_addr = XEXP (dest, 0);
1419 int from_struct = MEM_IN_STRUCT_P (src);
1420 int to_struct = MEM_IN_STRUCT_P (dest);
1421 int offset = 0;
1422 int chunk_size, item_size;
1423 struct meminsnbuf *ldinsns, *stinsns;
1424 const char *ldname, *stname;
1425 enum machine_mode mode;
1427 if (align > MOVE_MAX)
1428 align = MOVE_MAX;
1429 item_size = align;
1430 chunk_size = delay_slots + 1;
1432 ldinsns = (struct meminsnbuf *)
1433 alloca (chunk_size * sizeof (struct meminsnbuf));
1434 stinsns = (struct meminsnbuf *)
1435 alloca (chunk_size * sizeof (struct meminsnbuf));
1437 mode = xtensa_find_mode_for_size (item_size);
1438 item_size = GET_MODE_SIZE (mode);
1439 ldname = xtensa_ld_opcodes[(int) mode];
1440 stname = xtensa_st_opcodes[(int) mode];
1442 while (bytes > 0)
1444 int n;
1446 for (n = 0; n < chunk_size; n++)
1448 rtx addr, mem;
1450 if (bytes == 0)
1452 chunk_size = n;
1453 break;
1456 if (bytes < item_size)
1458 /* find a smaller item_size which we can load & store */
1459 item_size = bytes;
1460 mode = xtensa_find_mode_for_size (item_size);
1461 item_size = GET_MODE_SIZE (mode);
1462 ldname = xtensa_ld_opcodes[(int) mode];
1463 stname = xtensa_st_opcodes[(int) mode];
1466 /* record the load instruction opcode and operands */
1467 addr = plus_constant (from_addr, offset);
1468 mem = gen_rtx_MEM (mode, addr);
1469 if (! memory_address_p (mode, addr))
1470 abort ();
1471 MEM_IN_STRUCT_P (mem) = from_struct;
1472 ldinsns[n].operands[0] = tmpregs[n];
1473 ldinsns[n].operands[1] = mem;
1474 sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1476 /* record the store instruction opcode and operands */
1477 addr = plus_constant (to_addr, offset);
1478 mem = gen_rtx_MEM (mode, addr);
1479 if (! memory_address_p (mode, addr))
1480 abort ();
1481 MEM_IN_STRUCT_P (mem) = to_struct;
1482 stinsns[n].operands[0] = tmpregs[n];
1483 stinsns[n].operands[1] = mem;
1484 sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1486 offset += item_size;
1487 bytes -= item_size;
1490 /* now output the loads followed by the stores */
1491 for (n = 0; n < chunk_size; n++)
1492 output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1493 for (n = 0; n < chunk_size; n++)
1494 output_asm_insn (stinsns[n].template, stinsns[n].operands);
1499 static enum machine_mode
1500 xtensa_find_mode_for_size (item_size)
1501 unsigned item_size;
1503 enum machine_mode mode, tmode;
1505 while (1)
1507 mode = VOIDmode;
1509 /* find mode closest to but not bigger than item_size */
1510 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1511 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1512 if (GET_MODE_SIZE (tmode) <= item_size)
1513 mode = tmode;
1514 if (mode == VOIDmode)
1515 abort ();
1517 item_size = GET_MODE_SIZE (mode);
1519 if (xtensa_ld_opcodes[(int) mode]
1520 && xtensa_st_opcodes[(int) mode])
1521 break;
1523 /* cannot load & store this mode; try something smaller */
1524 item_size -= 1;
1527 return mode;
1531 void
1532 xtensa_expand_nonlocal_goto (operands)
1533 rtx *operands;
1535 rtx goto_handler = operands[1];
1536 rtx containing_fp = operands[3];
1538 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1539 is too big to generate in-line */
1541 if (GET_CODE (containing_fp) != REG)
1542 containing_fp = force_reg (Pmode, containing_fp);
1544 goto_handler = replace_rtx (copy_rtx (goto_handler),
1545 virtual_stack_vars_rtx,
1546 containing_fp);
1548 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1549 0, VOIDmode, 2,
1550 containing_fp, Pmode,
1551 goto_handler, Pmode);
1555 static struct machine_function *
1556 xtensa_init_machine_status ()
1558 return ggc_alloc_cleared (sizeof (struct machine_function));
1562 void
1563 xtensa_setup_frame_addresses ()
1565 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1566 cfun->machine->accesses_prev_frame = 1;
1568 emit_library_call
1569 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1570 0, VOIDmode, 0);
1574 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1575 a comment showing where the end of the loop is. However, if there is a
1576 label or a branch at the end of the loop then we need to place a nop
1577 there. If the loop ends with a label we need the nop so that branches
1578 targeting that label will target the nop (and thus remain in the loop),
1579 instead of targeting the instruction after the loop (and thus exiting
1580 the loop). If the loop ends with a branch, we need the nop in case the
1581 branch is targeting a location inside the loop. When the branch
1582 executes it will cause the loop count to be decremented even if it is
1583 taken (because it is the last instruction in the loop), so we need to
1584 nop after the branch to prevent the loop count from being decremented
1585 when the branch is taken. */
1587 void
1588 xtensa_emit_loop_end (insn, operands)
1589 rtx insn;
1590 rtx *operands;
1592 char done = 0;
1594 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1596 switch (GET_CODE (insn))
1598 case NOTE:
1599 case BARRIER:
1600 break;
1602 case CODE_LABEL:
1603 output_asm_insn ("nop.n", operands);
1604 done = 1;
1605 break;
1607 default:
1609 rtx body = PATTERN (insn);
1611 if (GET_CODE (body) == JUMP_INSN)
1613 output_asm_insn ("nop.n", operands);
1614 done = 1;
1616 else if ((GET_CODE (body) != USE)
1617 && (GET_CODE (body) != CLOBBER))
1618 done = 1;
1620 break;
1624 output_asm_insn ("# loop end for %0", operands);
1628 char *
1629 xtensa_emit_call (callop, operands)
1630 int callop;
1631 rtx *operands;
1633 static char result[64];
1634 rtx tgt = operands[callop];
1636 if (GET_CODE (tgt) == CONST_INT)
1637 sprintf (result, "call8\t0x%x", INTVAL (tgt));
1638 else if (register_operand (tgt, VOIDmode))
1639 sprintf (result, "callx8\t%%%d", callop);
1640 else
1641 sprintf (result, "call8\t%%%d", callop);
1643 return result;
1647 /* Return the stabs register number to use for 'regno'. */
1650 xtensa_dbx_register_number (regno)
1651 int regno;
1653 int first = -1;
1655 if (GP_REG_P (regno)) {
1656 regno -= GP_REG_FIRST;
1657 first = 0;
1659 else if (BR_REG_P (regno)) {
1660 regno -= BR_REG_FIRST;
1661 first = 16;
1663 else if (FP_REG_P (regno)) {
1664 regno -= FP_REG_FIRST;
1665 /* The current numbering convention is that TIE registers are
1666 numbered in libcc order beginning with 256. We can't guarantee
1667 that the FP registers will come first, so the following is just
1668 a guess. It seems like we should make a special case for FP
1669 registers and give them fixed numbers < 256. */
1670 first = 256;
1672 else if (ACC_REG_P (regno))
1674 first = 0;
1675 regno = -1;
1678 /* When optimizing, we sometimes get asked about pseudo-registers
1679 that don't represent hard registers. Return 0 for these. */
1680 if (first == -1)
1681 return 0;
1683 return first + regno;
1687 /* Argument support functions. */
1689 /* Initialize CUMULATIVE_ARGS for a function. */
1691 void
1692 init_cumulative_args (cum, fntype, libname)
1693 CUMULATIVE_ARGS *cum; /* argument info to initialize */
1694 tree fntype ATTRIBUTE_UNUSED; /* tree ptr for function decl */
1695 rtx libname ATTRIBUTE_UNUSED; /* SYMBOL_REF of library name or 0 */
1697 cum->arg_words = 0;
1700 /* Advance the argument to the next argument position. */
1702 void
1703 function_arg_advance (cum, mode, type)
1704 CUMULATIVE_ARGS *cum; /* current arg information */
1705 enum machine_mode mode; /* current arg mode */
1706 tree type; /* type of the argument or 0 if lib support */
1708 int words, max;
1709 int *arg_words;
1711 arg_words = &cum->arg_words;
1712 max = MAX_ARGS_IN_REGISTERS;
1714 words = (((mode != BLKmode)
1715 ? (int) GET_MODE_SIZE (mode)
1716 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1718 if ((*arg_words + words > max) && (*arg_words < max))
1719 *arg_words = max;
1721 *arg_words += words;
1725 /* Return an RTL expression containing the register for the given mode,
1726 or 0 if the argument is to be passed on the stack. */
1729 function_arg (cum, mode, type, incoming_p)
1730 CUMULATIVE_ARGS *cum; /* current arg information */
1731 enum machine_mode mode; /* current arg mode */
1732 tree type; /* type of the argument or 0 if lib support */
1733 int incoming_p; /* computing the incoming registers? */
1735 int regbase, words, max;
1736 int *arg_words;
1737 int regno;
1738 enum machine_mode result_mode;
1740 arg_words = &cum->arg_words;
1741 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1742 max = MAX_ARGS_IN_REGISTERS;
1744 words = (((mode != BLKmode)
1745 ? (int) GET_MODE_SIZE (mode)
1746 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1748 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1749 *arg_words += (*arg_words & 1);
1751 if (*arg_words + words > max)
1752 return (rtx)0;
1754 regno = regbase + *arg_words;
1755 result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);
1757 /* We need to make sure that references to a7 are represented with
1758 rtx that is not equal to hard_frame_pointer_rtx. For BLKmode and
1759 modes bigger than 2 words (because we only have patterns for
1760 modes of 2 words or smaller), we can't control the expansion
1761 unless we explicitly list the individual registers in a PARALLEL. */
1763 if ((mode == BLKmode || words > 2)
1764 && regno < A7_REG
1765 && regno + words > A7_REG)
1767 rtx result;
1768 int n;
1770 result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
1771 for (n = 0; n < words; n++)
1773 XVECEXP (result, 0, n) =
1774 gen_rtx_EXPR_LIST (VOIDmode,
1775 gen_raw_REG (SImode, regno + n),
1776 GEN_INT (n * UNITS_PER_WORD));
1778 return result;
1781 return gen_raw_REG (result_mode, regno);
1785 void
1786 override_options ()
1788 int regno;
1789 enum machine_mode mode;
1791 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1792 error ("boolean registers required for the floating-point option");
1794 /* set up the tables of ld/st opcode names for block moves */
1795 xtensa_ld_opcodes[(int) SImode] = "l32i";
1796 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1797 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1798 xtensa_st_opcodes[(int) SImode] = "s32i";
1799 xtensa_st_opcodes[(int) HImode] = "s16i";
1800 xtensa_st_opcodes[(int) QImode] = "s8i";
1802 xtensa_char_to_class['q'] = SP_REG;
1803 xtensa_char_to_class['a'] = GR_REGS;
1804 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1805 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1806 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1807 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1808 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1809 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1810 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1812 /* Set up array giving whether a given register can hold a given mode. */
1813 for (mode = VOIDmode;
1814 mode != MAX_MACHINE_MODE;
1815 mode = (enum machine_mode) ((int) mode + 1))
1817 int size = GET_MODE_SIZE (mode);
1818 enum mode_class class = GET_MODE_CLASS (mode);
1820 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1822 int temp;
1824 if (ACC_REG_P (regno))
1825 temp = (TARGET_MAC16 &&
1826 (class == MODE_INT) && (size <= UNITS_PER_WORD));
1827 else if (GP_REG_P (regno))
1828 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1829 else if (FP_REG_P (regno))
1830 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1831 else if (BR_REG_P (regno))
1832 temp = (TARGET_BOOLEANS && (mode == CCmode));
1833 else
1834 temp = FALSE;
1836 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1840 init_machine_status = xtensa_init_machine_status;
1842 /* Check PIC settings. There's no need for -fPIC on Xtensa and
1843 some targets need to always use PIC. */
1844 if (flag_pic > 1 || (XTENSA_ALWAYS_PIC))
1845 flag_pic = 1;
1849 /* A C compound statement to output to stdio stream STREAM the
1850 assembler syntax for an instruction operand X. X is an RTL
1851 expression.
1853 CODE is a value that can be used to specify one of several ways
1854 of printing the operand. It is used when identical operands
1855 must be printed differently depending on the context. CODE
1856 comes from the '%' specification that was used to request
1857 printing of the operand. If the specification was just '%DIGIT'
1858 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1859 is the ASCII code for LTR.
1861 If X is a register, this macro should print the register's name.
1862 The names can be found in an array 'reg_names' whose type is
1863 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1865 When the machine description has a specification '%PUNCT' (a '%'
1866 followed by a punctuation character), this macro is called with
1867 a null pointer for X and the punctuation character for CODE.
1869 'a', 'c', 'l', and 'n' are reserved.
1871 The Xtensa specific codes are:
1873 'd' CONST_INT, print as signed decimal
1874 'x' CONST_INT, print as signed hexadecimal
1875 'K' CONST_INT, print number of bits in mask for EXTUI
1876 'R' CONST_INT, print (X & 0x1f)
1877 'L' CONST_INT, print ((32 - X) & 0x1f)
1878 'D' REG, print second register of double-word register operand
1879 'N' MEM, print address of next word following a memory operand
1880 'v' MEM, if memory reference is volatile, output a MEMW before it
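/* For example, '%K' on (const_int 0xff) prints 8 (the EXTUI field size
   for a mask of eight low-order one bits), and '%L' on (const_int 8)
   prints 24. */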
1883 static void
1884 printx (file, val)
1885 FILE *file;
1886 signed int val;
1888 /* print a hexadecimal value in a nice way */
1889 if ((val > -0xa) && (val < 0xa))
1890 fprintf (file, "%d", val);
1891 else if (val < 0)
1892 fprintf (file, "-0x%x", -val);
1893 else
1894 fprintf (file, "0x%x", val);
1898 void
1899 print_operand (file, op, letter)
1900 FILE *file; /* file to write to */
1901 rtx op; /* operand to print */
1902 int letter; /* %<letter> or 0 */
1904 enum rtx_code code;
1906 if (! op)
1907 error ("PRINT_OPERAND null pointer");
1909 code = GET_CODE (op);
1910 switch (code)
1912 case REG:
1913 case SUBREG:
1915 int regnum = xt_true_regnum (op);
1916 if (letter == 'D')
1917 regnum++;
1918 fprintf (file, "%s", reg_names[regnum]);
1919 break;
1922 case MEM:
1923 /* For a volatile memory reference, emit a MEMW before the
1924 load or store. */
1925 if (letter == 'v')
1927 if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
1928 fprintf (file, "memw\n\t");
1929 break;
1931 else if (letter == 'N')
1933 enum machine_mode mode;
1934 switch (GET_MODE (op))
1936 case DFmode: mode = SFmode; break;
1937 case DImode: mode = SImode; break;
1938 default: abort ();
1940 op = adjust_address (op, mode, 4);
1943 output_address (XEXP (op, 0));
1944 break;
1946 case CONST_INT:
1947 switch (letter)
1949 case 'K':
1951 int num_bits = 0;
1952 unsigned val = INTVAL (op);
1953 while (val & 1)
1955 num_bits += 1;
1956 val = val >> 1;
1958 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
1959 fatal_insn ("invalid mask", op);
1961 fprintf (file, "%d", num_bits);
1962 break;
1965 case 'L':
1966 fprintf (file, "%d", (32 - INTVAL (op)) & 0x1f);
1967 break;
1969 case 'R':
1970 fprintf (file, "%d", INTVAL (op) & 0x1f);
1971 break;
1973 case 'x':
1974 printx (file, INTVAL (op));
1975 break;
1977 case 'd':
1978 default:
1979 fprintf (file, "%d", INTVAL (op));
1980 break;
1983 break;
1985 default:
1986 output_addr_const (file, op);
1991 /* A C compound statement to output to stdio stream STREAM the
1992 assembler syntax for an instruction operand that is a memory
1993 reference whose address is ADDR. ADDR is an RTL expression. */
1995 void
1996 print_operand_address (file, addr)
1997 FILE *file;
1998 rtx addr;
2000 if (!addr)
2001 error ("PRINT_OPERAND_ADDRESS, null pointer");
2003 switch (GET_CODE (addr))
2005 default:
2006 fatal_insn ("invalid address", addr);
2007 break;
2009 case REG:
2010 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2011 break;
2013 case PLUS:
2015 rtx reg = (rtx)0;
2016 rtx offset = (rtx)0;
2017 rtx arg0 = XEXP (addr, 0);
2018 rtx arg1 = XEXP (addr, 1);
2020 if (GET_CODE (arg0) == REG)
2022 reg = arg0;
2023 offset = arg1;
2025 else if (GET_CODE (arg1) == REG)
2027 reg = arg1;
2028 offset = arg0;
2030 else
2031 fatal_insn ("no register in address", addr);
2033 if (CONSTANT_P (offset))
2035 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2036 output_addr_const (file, offset);
2038 else
2039 fatal_insn ("address offset not a constant", addr);
2041 break;
2043 case LABEL_REF:
2044 case SYMBOL_REF:
2045 case CONST_INT:
2046 case CONST:
2047 output_addr_const (file, addr);
2048 break;
2053 /* Emit either a label, .comm, or .lcomm directive. */
2055 void
2056 xtensa_declare_object (file, name, init_string, final_string, size)
2057 FILE *file;
2058 char *name;
2059 char *init_string;
2060 char *final_string;
2061 int size;
2063 fputs (init_string, file); /* "", "\t.comm\t", or "\t.lcomm\t" */
2064 assemble_name (file, name);
2065 fprintf (file, final_string, size); /* ":\n", ",%u\n", ",%u\n" */
2069 void
2070 xtensa_output_literal (file, x, mode, labelno)
2071 FILE *file;
2072 rtx x;
2073 enum machine_mode mode;
2074 int labelno;
2076 long value_long[2];
2077 REAL_VALUE_TYPE r;
2078 int size;
2080 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2082 switch (GET_MODE_CLASS (mode))
2084 case MODE_FLOAT:
2085 if (GET_CODE (x) != CONST_DOUBLE)
2086 abort ();
2088 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2089 switch (mode)
2091 case SFmode:
2092 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2093 fprintf (file, "0x%08lx\n", value_long[0]);
2094 break;
2096 case DFmode:
2097 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2098 fprintf (file, "0x%08lx, 0x%08lx\n",
2099 value_long[0], value_long[1]);
2100 break;
2102 default:
2103 abort ();
2106 break;
2108 case MODE_INT:
2109 case MODE_PARTIAL_INT:
2110 size = GET_MODE_SIZE (mode);
2111 if (size == 4)
2113 output_addr_const (file, x);
2114 fputs ("\n", file);
2116 else if (size == 8)
2118 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2119 fputs (", ", file);
2120 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2121 fputs ("\n", file);
2123 else
2124 abort ();
2125 break;
2127 default:
2128 abort ();
2133 /* Return the bytes needed to compute the frame pointer from the current
2134 stack pointer. */
2136 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2137 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
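/* For example, assuming STACK_BOUNDARY is 128 bits (so STACK_BYTES is 16),
   XTENSA_STACK_ALIGN (20) == 32 and XTENSA_STACK_ALIGN (48) == 48. */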
2139 long
2140 compute_frame_size (size)
2141 int size; /* # of var. bytes allocated */
2143 /* add space for the incoming static chain value */
2144 if (current_function_needs_context)
2145 size += (1 * UNITS_PER_WORD);
2147 xtensa_current_frame_size =
2148 XTENSA_STACK_ALIGN (size
2149 + current_function_outgoing_args_size
2150 + (WINDOW_SIZE * UNITS_PER_WORD));
2151 return xtensa_current_frame_size;
2156 xtensa_frame_pointer_required ()
2158 /* The code to expand builtin_frame_addr and builtin_return_addr
2159 currently uses the hard_frame_pointer instead of frame_pointer.
2160 This seems wrong but maybe it's necessary for other architectures.
2161 This function is derived from the i386 code. */
2163 if (cfun->machine->accesses_prev_frame)
2164 return 1;
2166 return 0;
2170 void
2171 xtensa_reorg (first)
2172 rtx first;
2174 rtx insn, set_frame_ptr_insn = 0;
2176 unsigned long tsize = compute_frame_size (get_frame_size ());
2177 if (tsize < (1 << (12+3)))
2178 frame_size_const = 0;
2179 else
2181 frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));
2183 /* make sure the constant is used so it doesn't get eliminated
2184 from the constant pool */
2185 emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
2188 if (!frame_pointer_needed)
2189 return;
2191 /* Search all instructions, looking for the insn that sets up the
2192 frame pointer. This search will fail if the function does not
2193 have an incoming argument in $a7, but in that case, we can just
2194 set up the frame pointer at the very beginning of the
2195 function. */
2197 for (insn = first; insn; insn = NEXT_INSN (insn))
2199 rtx pat;
2201 if (!INSN_P (insn))
2202 continue;
2204 pat = PATTERN (insn);
2205 if (GET_CODE (pat) == UNSPEC_VOLATILE
2206 && (XINT (pat, 1) == UNSPECV_SET_FP))
2208 set_frame_ptr_insn = insn;
2209 break;
2213 if (set_frame_ptr_insn)
2215 /* for all instructions prior to set_frame_ptr_insn, replace
2216 hard_frame_pointer references with stack_pointer */
2217 for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
2219 if (INSN_P (insn))
2220 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2221 hard_frame_pointer_rtx,
2222 stack_pointer_rtx);
2225 else
2227 /* emit the frame pointer move immediately after the NOTE that starts
2228 the function */
2229 emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
2230 stack_pointer_rtx), first);
2235 /* Set up the stack and frame (if desired) for the function. */
2237 void
2238 xtensa_function_prologue (file, size)
2239 FILE *file;
2240 int size ATTRIBUTE_UNUSED;
2242 unsigned long tsize = compute_frame_size (get_frame_size ());
2244 if (frame_pointer_needed)
2245 fprintf (file, "\t.frame\ta7, %ld\n", tsize);
2246 else
2247 fprintf (file, "\t.frame\tsp, %ld\n", tsize);
2250 if (tsize < (1 << (12+3)))
2252 fprintf (file, "\tentry\tsp, %ld\n", tsize);
2254 else
2256 fprintf (file, "\tentry\tsp, 16\n");
2258 /* use a8 as a temporary since a0-a7 may be live */
2259 fprintf (file, "\tl32r\ta8, ");
2260 print_operand (file, frame_size_const, 0);
2261 fprintf (file, "\n\tsub\ta8, sp, a8\n");
2262 fprintf (file, "\tmovsp\tsp, a8\n");
2267 /* Do any necessary cleanup after a function to restore
2268 stack, frame, and regs. */
2270 void
2271 xtensa_function_epilogue (file, size)
2272 FILE *file;
2273 int size ATTRIBUTE_UNUSED;
2275 rtx insn = get_last_insn ();
2276 /* If the last insn was a BARRIER, we don't have to write anything. */
2277 if (GET_CODE (insn) == NOTE)
2278 insn = prev_nonnote_insn (insn);
2279 if (insn == 0 || GET_CODE (insn) != BARRIER)
2280 fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");
2282 xtensa_current_frame_size = 0;
2287 xtensa_return_addr (count, frame)
2288 int count;
2289 rtx frame;
2291 rtx result, retaddr;
2293 if (count == -1)
2294 retaddr = gen_rtx_REG (Pmode, 0);
2295 else
2297 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2298 addr = memory_address (Pmode, addr);
2299 retaddr = gen_reg_rtx (Pmode);
2300 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2303 /* The 2 most-significant bits of the return address on Xtensa hold
2304 the register window size. To get the real return address, these
2305 bits must be replaced with the high bits from the current PC. */
2307 result = gen_reg_rtx (Pmode);
2308 emit_insn (gen_fix_return_addr (result, retaddr));
2309 return result;
2313 /* Create the va_list data type.
2314 This structure is set up by __builtin_saveregs. The __va_reg
2315 field points to a stack-allocated region holding the contents of the
2316 incoming argument registers. The __va_ndx field is an index initialized
2317 to the position of the first unnamed (variable) argument. This same index
2318 is also used to address the arguments passed in memory. Thus, the
2319 __va_stk field is initialized to point to the position of the first
2320 argument in memory, offset to account for the arguments passed in
2321 registers. E.G., if there are 6 argument registers, and each register is
2322 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2323 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2324 argument word N for N >= 6. */
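/* Roughly equivalent C view of the record built below (an illustrative
   sketch only; the actual type is constructed with tree nodes):

       typedef struct __va_list_tag {
         void *__va_stk;   // first stack arg, biased by the register-save size
         void *__va_reg;   // block holding the incoming argument registers
         int   __va_ndx;   // byte index of the next argument
       } __va_list_tag;
*/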
2326 tree
2327 xtensa_build_va_list ()
2329 tree f_stk, f_reg, f_ndx, record, type_decl;
2331 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2332 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2334 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2335 ptr_type_node);
2336 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2337 ptr_type_node);
2338 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2339 integer_type_node);
2341 DECL_FIELD_CONTEXT (f_stk) = record;
2342 DECL_FIELD_CONTEXT (f_reg) = record;
2343 DECL_FIELD_CONTEXT (f_ndx) = record;
2345 TREE_CHAIN (record) = type_decl;
2346 TYPE_NAME (record) = type_decl;
2347 TYPE_FIELDS (record) = f_stk;
2348 TREE_CHAIN (f_stk) = f_reg;
2349 TREE_CHAIN (f_reg) = f_ndx;
2351 layout_type (record);
2352 return record;
2356 /* Save the incoming argument registers on the stack. Returns the
2357 address of the saved registers. */
2359 rtx
2360 xtensa_builtin_saveregs ()
2362 rtx gp_regs, dest;
2363 int arg_words = current_function_arg_words;
2364 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2365 int i;
2367 if (gp_left == 0)
2368 return const0_rtx;
2370 /* allocate the general-purpose register space */
2371 gp_regs = assign_stack_local
2372 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2373 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2375 /* Now store the incoming registers. */
2376 dest = change_address (gp_regs, SImode,
2377 plus_constant (XEXP (gp_regs, 0),
2378 arg_words * UNITS_PER_WORD));
2380 /* Note: Don't use move_block_from_reg() here because the incoming
2381 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2382 Instead, call gen_raw_REG() directly so that we get a distinct
2383 instance of (REG:SI 7). */
2384 for (i = 0; i < gp_left; i++)
2386 emit_move_insn (operand_subword (dest, i, 1, BLKmode),
2387 gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
2390 return XEXP (gp_regs, 0);
2394 /* Implement `va_start' for varargs and stdarg. We look at the
2395 current function to fill in an initial va_list. */
2397 void
2398 xtensa_va_start (valist, nextarg)
2399 tree valist;
2400 rtx nextarg ATTRIBUTE_UNUSED;
2402 tree f_stk, stk;
2403 tree f_reg, reg;
2404 tree f_ndx, ndx;
2405 tree t, u;
2406 int arg_words;
2408 arg_words = current_function_args_info.arg_words;
2410 f_stk = TYPE_FIELDS (va_list_type_node);
2411 f_reg = TREE_CHAIN (f_stk);
2412 f_ndx = TREE_CHAIN (f_reg);
2414 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2415 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2416 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2418 /* Call __builtin_saveregs; save the result in __va_reg */
2419 current_function_arg_words = arg_words;
2420 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2421 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2422 TREE_SIDE_EFFECTS (t) = 1;
2423 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2425 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2426 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2427 u = fold (build (PLUS_EXPR, ptr_type_node, u,
2428 build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
2429 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2430 TREE_SIDE_EFFECTS (t) = 1;
2431 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2433 /* Set the __va_ndx member. */
2434 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2435 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2436 TREE_SIDE_EFFECTS (t) = 1;
2437 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
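/* In source terms, the three assignments expanded above amount to the
   following (an illustrative sketch, assuming 6 argument registers of
   4 bytes each; __incoming_args stands for virtual_incoming_args_rtx):

       ap.__va_reg = __builtin_saveregs ();
       ap.__va_stk = (char *) __incoming_args - 6 * 4;
       ap.__va_ndx = arg_words * 4;
*/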
2441 /* Implement `va_arg'. */
2443 rtx
2444 xtensa_va_arg (valist, type)
2445 tree valist, type;
2447 tree f_stk, stk;
2448 tree f_reg, reg;
2449 tree f_ndx, ndx;
2450 tree tmp, addr_tree, type_size;
2451 rtx array, orig_ndx, r, addr, size, va_size;
2452 rtx lab_false, lab_over, lab_false2;
2454 f_stk = TYPE_FIELDS (va_list_type_node);
2455 f_reg = TREE_CHAIN (f_stk);
2456 f_ndx = TREE_CHAIN (f_reg);
2458 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2459 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2460 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2462 type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
2464 va_size = gen_reg_rtx (SImode);
2465 tmp = fold (build (MULT_EXPR, sizetype,
2466 fold (build (TRUNC_DIV_EXPR, sizetype,
2467 fold (build (PLUS_EXPR, sizetype,
2468 type_size,
2469 size_int (UNITS_PER_WORD - 1))),
2470 size_int (UNITS_PER_WORD))),
2471 size_int (UNITS_PER_WORD)));
2472 r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
2473 if (r != va_size)
2474 emit_move_insn (va_size, r);
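/* The expression just expanded rounds the argument size up to a whole
   number of words: with UNITS_PER_WORD == 4,
   __va_size (TYPE) == ((sizeof (TYPE) + 3) / 4) * 4, so, for example, a
   5-byte type occupies 8 bytes.  */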
2477 /* First align __va_ndx to a double word boundary if necessary for this arg:
2479 if (__alignof__ (TYPE) > 4)
2480 (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
2483 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2485 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2486 build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
2487 tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2488 build_int_2 (-2 * UNITS_PER_WORD, -1));
2489 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2490 TREE_SIDE_EFFECTS (tmp) = 1;
2491 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2495 /* Increment __va_ndx to point past the argument:
2497 orig_ndx = (AP).__va_ndx;
2498 (AP).__va_ndx += __va_size (TYPE);
2501 orig_ndx = gen_reg_rtx (SImode);
2502 r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2503 if (r != orig_ndx)
2504 emit_move_insn (orig_ndx, r);
2506 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2507 make_tree (intSI_type_node, va_size));
2508 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2509 TREE_SIDE_EFFECTS (tmp) = 1;
2510 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2513 /* Check if the argument is in registers:
2515 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2516 && !MUST_PASS_IN_STACK (type))
2517 __array = (AP).__va_reg;
2520 array = gen_reg_rtx (Pmode);
2522 lab_over = NULL_RTX;
2523 if (!MUST_PASS_IN_STACK (VOIDmode, type))
2525 lab_false = gen_label_rtx ();
2526 lab_over = gen_label_rtx ();
2528 emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
2529 EXPAND_NORMAL),
2530 GEN_INT (MAX_ARGS_IN_REGISTERS
2531 * UNITS_PER_WORD),
2532 GT, const1_rtx, SImode, 0, lab_false);
2534 r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2535 if (r != array)
2536 emit_move_insn (array, r);
2538 emit_jump_insn (gen_jump (lab_over));
2539 emit_barrier ();
2540 emit_label (lab_false);
2543 /* ...otherwise, the argument is on the stack (never split between
2544 registers and the stack -- change __va_ndx if necessary):
2546 else
2548 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2549 (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2550 __array = (AP).__va_stk;
2554 lab_false2 = gen_label_rtx ();
2555 emit_cmp_and_jump_insns (orig_ndx,
2556 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2557 GE, const1_rtx, SImode, 0, lab_false2);
2559 tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
2560 build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
2561 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2562 TREE_SIDE_EFFECTS (tmp) = 1;
2563 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2565 emit_label (lab_false2);
2567 r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2568 if (r != array)
2569 emit_move_insn (array, r);
2571 if (lab_over != NULL_RTX)
2572 emit_label (lab_over);
2575 /* Given the base array pointer (__array) and index to the subsequent
2576 argument (__va_ndx), find the address:
2578 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2579 ? sizeof (TYPE)
2580 : __va_size (TYPE))
2582 The results are endian-dependent because values smaller than one word
2583 are aligned differently.
2586 size = gen_reg_rtx (SImode);
2587 emit_move_insn (size, va_size);
2589 if (BYTES_BIG_ENDIAN)
2591 rtx lab_use_va_size = gen_label_rtx ();
2593 emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
2594 EXPAND_NORMAL),
2595 GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
2596 GE, const1_rtx, SImode, 0, lab_use_va_size);
2598 r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
2599 if (r != size)
2600 emit_move_insn (size, r);
2602 emit_label (lab_use_va_size);
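/* Worked example of the address computed next (illustrative, with
   UNITS_PER_WORD == 4): for a 2-byte argument whose slot ends at
   __va_ndx == 8, a big-endian target reads from __array + 8 - 2, while a
   little-endian target reads from __array + 8 - 4.  */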
2605 addr_tree = build (PLUS_EXPR, ptr_type_node,
2606 make_tree (ptr_type_node, array),
2607 ndx);
2608 addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
2609 make_tree (intSI_type_node, size));
2610 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2611 addr = copy_to_reg (addr);
2612 return addr;
2616 enum reg_class
2617 xtensa_preferred_reload_class (x, class, isoutput)
2618 rtx x;
2619 enum reg_class class;
2620 int isoutput;
2622 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2623 return NO_REGS;
2625 /* Don't use the stack pointer or hard frame pointer for reloads!
2626 The hard frame pointer would normally be OK except that it may
2627 briefly hold an incoming argument in the prologue, and reload
2628 won't know that it is live because the hard frame pointer is
2629 treated specially. */
2631 if (class == AR_REGS || class == GR_REGS)
2632 return RL_REGS;
2634 return class;
2638 enum reg_class
2639 xtensa_secondary_reload_class (class, mode, x, isoutput)
2640 enum reg_class class;
2641 enum machine_mode mode ATTRIBUTE_UNUSED;
2642 rtx x;
2643 int isoutput;
2645 int regno;
2647 if (GET_CODE (x) == SIGN_EXTEND)
2648 x = XEXP (x, 0);
2649 regno = xt_true_regnum (x);
2651 if (!isoutput)
2653 if (class == FP_REGS && constantpool_mem_p (x))
2654 return RL_REGS;
2657 if (ACC_REG_P (regno))
2658 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
2659 if (class == ACC_REG)
2660 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
2662 return NO_REGS;
2666 void
2667 order_regs_for_local_alloc ()
2669 if (!leaf_function_p ())
2671 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2672 FIRST_PSEUDO_REGISTER * sizeof (int));
2674 else
2676 int i, num_arg_regs;
2677 int nxt = 0;
2679 /* Use the AR registers in increasing order (skipping a0 and a1)
2680 but save the incoming argument registers as a last resort.  */
2681 num_arg_regs = current_function_args_info.arg_words;
2682 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2683 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2684 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2685 reg_alloc_order[nxt++] = i + num_arg_regs;
2686 for (i = 0; i < num_arg_regs; i++)
2687 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
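/* For example (an illustrative sketch, assuming GP_ARG_FIRST == 2 and two
   incoming argument words): the two loops above produce a4 ... a15 followed
   by a2, a3, so registers still holding incoming arguments are tried
   last.  */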
2689 /* list the FP registers in order for now */
2690 for (i = 0; i < 16; i++)
2691 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2693 /* GCC requires that we list *all* the registers.... */
2694 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2695 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2696 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2697 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2699 /* list the coprocessor registers in order */
2700 for (i = 0; i < BR_REG_NUM; i++)
2701 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2703 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2708 /* A customized version of reg_overlap_mentioned_p that only looks for
2709 references to a7 (as opposed to hard_frame_pointer_rtx). */
2711 int
2712 a7_overlap_mentioned_p (x)
2713 rtx x;
2715 int i, j;
2716 unsigned int x_regno;
2717 const char *fmt;
2719 if (GET_CODE (x) == REG)
2721 x_regno = REGNO (x);
2722 return (x != hard_frame_pointer_rtx
2723 && x_regno < A7_REG + 1
2724 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2727 if (GET_CODE (x) == SUBREG
2728 && GET_CODE (SUBREG_REG (x)) == REG
2729 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2731 x_regno = subreg_regno (x);
2732 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2733 && x_regno < A7_REG + 1
2734 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2737 /* X does not match, so try its subexpressions. */
2738 fmt = GET_RTX_FORMAT (GET_CODE (x));
2739 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2741 if (fmt[i] == 'e')
2743 if (a7_overlap_mentioned_p (XEXP (x, i)))
2744 return 1;
2746 else if (fmt[i] == 'E')
2748 for (j = XVECLEN (x, i) - 1; j >=0; j--)
2749 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2750 return 1;
2754 return 0;
2758 /* Some Xtensa targets support multiple bss sections. If the section
2759 name ends with ".bss", add SECTION_BSS to the flags. */
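/* Example of the intended effect (illustrative): an uninitialized variable
   placed with __attribute__ ((section (".foo.bss"))) is marked SECTION_BSS,
   while an initialized variable in such a section triggers the warning
   below.  */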
2761 static unsigned int
2762 xtensa_multibss_section_type_flags (decl, name, reloc)
2763 tree decl;
2764 const char *name;
2765 int reloc;
2767 unsigned int flags = default_section_type_flags (decl, name, reloc);
2768 const char *suffix;
2770 suffix = strrchr (name, '.');
2771 if (suffix && strcmp (suffix, ".bss") == 0)
2773 if (!decl || (TREE_CODE (decl) == VAR_DECL
2774 && DECL_INITIAL (decl) == NULL_TREE))
2775 flags |= SECTION_BSS; /* @nobits */
2776 else
2777 warning ("only uninitialized variables can be placed in a "
2778 ".bss section");
2781 return flags;
2785 /* The literal pool stays with the function. */
2787 static void
2788 xtensa_select_rtx_section (mode, x, align)
2789 enum machine_mode mode ATTRIBUTE_UNUSED;
2790 rtx x ATTRIBUTE_UNUSED;
2791 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
2793 function_section (current_function_decl);
2796 /* If we are referencing a function that is static, make the SYMBOL_REF
2797 special so that we can generate direct calls to it even with -fpic. */
2799 static void
2800 xtensa_encode_section_info (decl, first)
2801 tree decl;
2802 int first ATTRIBUTE_UNUSED;
2804 if (TREE_CODE (decl) == FUNCTION_DECL && ! TREE_PUBLIC (decl))
2805 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
2808 #include "gt-xtensa.h"