* config/xtensa/xtensa.c (xtensa_va_arg): Handle variable-sized types.
[official-gcc.git] / gcc / config / xtensa / xtensa.c
blob0455737baf59ca00412a18c11aacecae26d3aa8a
1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright (C) 2001 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "regs.h"
26 #include "machmode.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
35 #include "recog.h"
36 #include "output.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "flags.h"
40 #include "reload.h"
41 #include "tm_p.h"
42 #include "function.h"
43 #include "toplev.h"
44 #include "optabs.h"
45 #include "libfuncs.h"
46 #include "target.h"
47 #include "target-def.h"
/* NOTE(review): this extraction lost blank lines and brace-only lines of the
   original file; all surviving tokens below are kept byte-identical.  */
49 /* Enumeration for all of the relational tests, so that we can build
50 arrays indexed by the test type, and not worry about the order
51 of EQ, NE, etc. */
53 enum internal_test {
54 ITEST_EQ,
55 ITEST_NE,
56 ITEST_GT,
57 ITEST_GE,
58 ITEST_LT,
59 ITEST_LE,
60 ITEST_GTU,
61 ITEST_GEU,
62 ITEST_LTU,
63 ITEST_LEU,
64 ITEST_MAX
67 /* Cached operands, and operator to compare for use in set/branch on
68 condition codes. */
69 rtx branch_cmp[2];
71 /* what type of branch to use */
72 enum cmp_type branch_type;
74 /* Array giving truth value on whether or not a given hard register
75 can support a given mode. */
76 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
78 /* Current frame size calculated by compute_frame_size. */
79 unsigned xtensa_current_frame_size;
81 /* Tables of ld/st opcode names for block moves */
82 const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
83 const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
84 #define LARGEST_MOVE_RATIO 15
86 /* Define the structure for the machine field in struct function. */
87 struct machine_function
89 int accesses_prev_frame;
92 /* Vector, indexed by hard register number, which contains 1 for a
93 register that is allowable in a candidate for leaf function
94 treatment. */
96 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
98 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
99 1, 1, 1,
100 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
104 /* Map hard register number to register class */
105 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
107 GR_REGS, SP_REG, GR_REGS, GR_REGS,
108 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
109 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
110 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
111 AR_REGS, AR_REGS, BR_REGS,
112 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
113 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
114 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
115 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
116 ACC_REG,
119 /* Map register constraint character to register class. */
/* NOTE(review): all entries are NO_REGS here; presumably the real mappings
   are filled in at run time by the option-override code — confirm against
   the full file.  */
120 enum reg_class xtensa_char_to_class[256] =
122 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
123 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
124 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
125 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
126 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 /* This macro generates the assembly code for function entry.
189 FILE is a stdio stream to output the code to.
190 SIZE is an int: how many units of temporary storage to allocate.
191 Refer to the array 'regs_ever_live' to determine which registers
192 to save; 'regs_ever_live[I]' is nonzero if register number I
193 is ever used in the function. This macro is responsible for
194 knowing which registers should not be saved even if used. */
196 #undef TARGET_ASM_FUNCTION_PROLOGUE
197 #define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
199 /* This macro generates the assembly code for function exit,
200 on machines that need it. If FUNCTION_EPILOGUE is not defined
201 then individual return instructions are generated for each
202 return statement. Args are same as for FUNCTION_PROLOGUE. */
204 #undef TARGET_ASM_FUNCTION_EPILOGUE
205 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
207 /* These hooks specify assembly directives for creating certain kinds
208 of integer object. */
210 #undef TARGET_ASM_ALIGNED_SI_OP
211 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
/* The target hook vector for the Xtensa back end.  */
213 struct gcc_target targetm = TARGET_INITIALIZER;
/* Forward declarations for file-local helpers and state.  */
215 static int b4const_or_zero PARAMS ((int));
216 static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
217 static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
218 static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
219 static rtx gen_conditional_move PARAMS ((rtx));
220 static rtx fixup_subreg_mem PARAMS ((rtx x));
221 static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
222 static void xtensa_init_machine_status PARAMS ((struct function *p));
223 static void xtensa_free_machine_status PARAMS ((struct function *p));
224 static void printx PARAMS ((FILE *, signed int));
225 static rtx frame_size_const;
226 static int current_function_arg_words;
227 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
228 REG_ALLOC_ORDER;
232 /* Functions to test Xtensa immediate operand validity.  */
/* Return nonzero if V is one of the sixteen immediates encodable in the
   unsigned "b4constu" branch-immediate field (the b4const set with -1 and
   1 replaced by 32768 and 65536).  */
int
xtensa_b4constu (int v)
{
  static const int b4constu_table[] = {
    32768, 65536, 2, 3, 4, 5, 6, 7, 8, 10, 12, 16, 32, 64, 128, 256
  };
  unsigned i;

  for (i = 0; i < sizeof b4constu_table / sizeof b4constu_table[0]; i++)
    if (v == b4constu_table[i])
      return 1;
  return 0;
}
/* Return nonzero if V is a multiple of 256 in [-32768, 32512], i.e. a
   signed 8-bit value shifted left by 8 (ADDMI-style immediate).  */
int
xtensa_simm8x256 (int v)
{
  if ((v & 255) != 0)
    return 0;
  return v >= -32768 && v <= 32512;
}
/* Return nonzero if V is valid for an ADDI.N immediate: -1 or 1..15.  */
int
xtensa_ai4const (int v)
{
  if (v == -1)
    return 1;
  return v >= 1 && v <= 15;
}
/* Return nonzero if V is in the asymmetric 7-bit range [-32, 95]
   (MOVI.N immediate).  */
int
xtensa_simm7 (int v)
{
  return !(v < -32 || v > 95);
}
/* Return nonzero if V is one of the sixteen immediates encodable in the
   signed "b4const" branch-immediate field.  */
int
xtensa_b4const (int v)
{
  static const int b4const_table[] = {
    -1, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 16, 32, 64, 128, 256
  };
  unsigned i;

  for (i = 0; i < sizeof b4const_table / sizeof b4const_table[0]; i++)
    if (v == b4const_table[i])
      return 1;
  return 0;
}
/* Return nonzero if V fits in a signed 8-bit immediate field.  */
int
xtensa_simm8 (int v)
{
  return !(v < -128 || v > 127);
}
/* Return nonzero if V is in [7, 22] (SEXT instruction bit-position
   encoding).  */
int
xtensa_tp7 (int v)
{
  return !(v < 7 || v > 22);
}
/* Return nonzero if V is a word-aligned offset in [0, 60]
   (L32I.N/S32I.N offset field: 4-bit count scaled by 4).  */
int
xtensa_lsi4x4 (int v)
{
  if ((v & 3) != 0)
    return 0;
  return v >= 0 && v <= 60;
}
/* Return nonzero if V fits in a signed 12-bit immediate field
   (MOVI immediate).  */
int
xtensa_simm12b (int v)
{
  return !(v < -2048 || v > 2047);
}
/* Return nonzero if V fits in an unsigned 8-bit immediate field.  */
int
xtensa_uimm8 (int v)
{
  return !(v < 0 || v > 255);
}
/* Return nonzero if V is an even value in [0, 510]: an unsigned 8-bit
   count scaled by 2 (16-bit load/store offset).  */
int
xtensa_uimm8x2 (int v)
{
  if ((v & 1) != 0)
    return 0;
  return v >= 0 && v <= 510;
}
/* Return nonzero if V is a multiple of 4 in [0, 1020]: an unsigned 8-bit
   count scaled by 4 (32-bit load/store offset).  */
int
xtensa_uimm8x4 (int v)
{
  if ((v & 3) != 0)
    return 0;
  return v >= 0 && v <= 1020;
}
360 /* This is just like the standard true_regnum() function except that it
361 works even when reg_renumber is not initialized. */
/* NOTE(review): returns the (eventual) hard register number for a REG or
   SUBREG rtx, or -1 when it cannot be determined.  Kept byte-identical;
   this dump lost the return type, brace and blank lines.  */
364 xt_true_regnum (x)
365 rtx x;
367 if (GET_CODE (x) == REG)
369 if (reg_renumber
370 && REGNO (x) >= FIRST_PSEUDO_REGISTER
371 && reg_renumber[REGNO (x)] >= 0)
372 return reg_renumber[REGNO (x)];
373 return REGNO (x);
375 if (GET_CODE (x) == SUBREG)
377 int base = xt_true_regnum (SUBREG_REG (x));
378 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
379 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
380 GET_MODE (SUBREG_REG (x)),
381 SUBREG_BYTE (x), GET_MODE (x));
383 return -1;
388 add_operand (op, mode)
389 rtx op;
390 enum machine_mode mode;
392 if (GET_CODE (op) == CONST_INT)
393 return (xtensa_simm8 (INTVAL (op)) ||
394 xtensa_simm8x256 (INTVAL (op)));
396 return register_operand (op, mode);
401 arith_operand (op, mode)
402 rtx op;
403 enum machine_mode mode;
405 if (GET_CODE (op) == CONST_INT)
406 return xtensa_simm8 (INTVAL (op));
408 return register_operand (op, mode);
413 nonimmed_operand (op, mode)
414 rtx op;
415 enum machine_mode mode;
417 /* We cannot use the standard nonimmediate_operand() predicate because
418 it includes constant pool memory operands. */
420 if (memory_operand (op, mode))
421 return !constantpool_address_p (XEXP (op, 0));
423 return register_operand (op, mode);
428 mem_operand (op, mode)
429 rtx op;
430 enum machine_mode mode;
432 /* We cannot use the standard memory_operand() predicate because
433 it includes constant pool memory operands. */
435 if (memory_operand (op, mode))
436 return !constantpool_address_p (XEXP (op, 0));
438 return FALSE;
/* NOTE(review): checks that a move in MODE between OPERANDS[0] and
   OPERANDS[1] is legal: one side must be a register, the MAC16
   accumulator does not count, and the stack pointer may only be written
   from a plain SImode register (MOVSP).  Kept byte-identical.  */
443 xtensa_valid_move (mode, operands)
444 enum machine_mode mode;
445 rtx *operands;
447 /* Either the destination or source must be a register, and the
448 MAC16 accumulator doesn't count. */
450 if (register_operand (operands[0], mode))
452 int dst_regnum = xt_true_regnum (operands[0]);
454 /* The stack pointer can only be assigned with a MOVSP opcode. */
455 if (dst_regnum == STACK_POINTER_REGNUM)
456 return (mode == SImode
457 && register_operand (operands[1], mode)
458 && !ACC_REG_P (xt_true_regnum (operands[1])));
460 if (!ACC_REG_P (dst_regnum))
461 return true;
463 if (register_operand (operands[1], mode))
465 int src_regnum = xt_true_regnum (operands[1]);
466 if (!ACC_REG_P (src_regnum))
467 return true;
469 return FALSE;
474 mask_operand (op, mode)
475 rtx op;
476 enum machine_mode mode;
478 if (GET_CODE (op) == CONST_INT)
479 return xtensa_mask_immediate (INTVAL (op));
481 return register_operand (op, mode);
486 extui_fldsz_operand (op, mode)
487 rtx op;
488 enum machine_mode mode ATTRIBUTE_UNUSED;
490 return ((GET_CODE (op) == CONST_INT)
491 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
496 sext_operand (op, mode)
497 rtx op;
498 enum machine_mode mode;
500 if (TARGET_SEXT)
501 return nonimmed_operand (op, mode);
502 return mem_operand (op, mode);
507 sext_fldsz_operand (op, mode)
508 rtx op;
509 enum machine_mode mode ATTRIBUTE_UNUSED;
511 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
516 lsbitnum_operand (op, mode)
517 rtx op;
518 enum machine_mode mode ATTRIBUTE_UNUSED;
520 if (GET_CODE (op) == CONST_INT)
522 return (BITS_BIG_ENDIAN
523 ? (INTVAL (op) == BITS_PER_WORD-1)
524 : (INTVAL (op) == 0));
526 return FALSE;
/* Like xtensa_b4const but additionally accepts zero.  */
static int
b4const_or_zero (int v)
{
  return v == 0 || xtensa_b4const (v);
}
541 branch_operand (op, mode)
542 rtx op;
543 enum machine_mode mode;
545 if (GET_CODE (op) == CONST_INT)
546 return b4const_or_zero (INTVAL (op));
548 return register_operand (op, mode);
553 ubranch_operand (op, mode)
554 rtx op;
555 enum machine_mode mode;
557 if (GET_CODE (op) == CONST_INT)
558 return xtensa_b4constu (INTVAL (op));
560 return register_operand (op, mode);
/* NOTE(review): predicate for a call target: a suitable hard register or
   pseudo, or a constant address (restricted to static functions when PIC).
   Kept byte-identical.  */
565 call_insn_operand (op, mode)
566 rtx op;
567 enum machine_mode mode ATTRIBUTE_UNUSED;
569 if ((GET_CODE (op) == REG)
570 && (op != arg_pointer_rtx)
571 && ((REGNO (op) < FRAME_POINTER_REGNUM)
572 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
573 return TRUE;
575 if (CONSTANT_ADDRESS_P (op))
577 /* Direct calls only allowed to static functions with PIC. */
578 return (!flag_pic || (GET_CODE (op) == SYMBOL_REF
579 && SYMBOL_REF_FLAG (op)));
582 return FALSE;
/* NOTE(review): predicate for general move sources/destinations:
   registers, CONSTANT_P_RTX, 12-bit signed constants (MOVI), or memory
   with a valid address.  Kept byte-identical.  */
587 move_operand (op, mode)
588 rtx op;
589 enum machine_mode mode;
591 if (register_operand (op, mode))
592 return TRUE;
594 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
595 result in 0/1. */
596 if (GET_CODE (op) == CONSTANT_P_RTX)
597 return TRUE;
599 if (GET_CODE (op) == CONST_INT)
600 return xtensa_simm12b (INTVAL (op));
602 if (GET_CODE (op) == MEM)
603 return memory_address_p (mode, XEXP (op, 0));
605 return FALSE;
/* NOTE(review): true if OP is a MEM whose address is a base register or
   base+offset with the offset in the lsi4x4 range (narrow load/store).
   Kept byte-identical.  */
610 smalloffset_mem_p (op)
611 rtx op;
613 if (GET_CODE (op) == MEM)
615 rtx addr = XEXP (op, 0);
616 if (GET_CODE (addr) == REG)
617 return REG_OK_FOR_BASE_P (addr);
618 if (GET_CODE (addr) == PLUS)
620 rtx offset = XEXP (addr, 0);
621 if (GET_CODE (offset) != CONST_INT)
622 offset = XEXP (addr, 1);
623 if (GET_CODE (offset) != CONST_INT)
624 return FALSE;
625 return xtensa_lsi4x4 (INTVAL (offset));
628 return FALSE;
/* NOTE(review): like smalloffset_mem_p but also requires the address 4
   bytes later to qualify (for double-word accesses).  */
633 smalloffset_double_mem_p (op)
634 rtx op;
636 if (!smalloffset_mem_p (op))
637 return FALSE;
638 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
/* NOTE(review): true if ADDR refers into the constant pool, either as a
   bare SYMBOL_REF or as (CONST (PLUS sym word-aligned-offset)).
   Kept byte-identical.  */
643 constantpool_address_p (addr)
644 rtx addr;
646 rtx sym = addr;
648 if (GET_CODE (addr) == CONST)
650 rtx offset;
652 /* only handle (PLUS (SYM, OFFSET)) form */
653 addr = XEXP (addr, 0);
654 if (GET_CODE (addr) != PLUS)
655 return FALSE;
657 /* make sure the address is word aligned */
658 offset = XEXP (addr, 1);
659 if ((GET_CODE (offset) != CONST_INT)
660 || ((INTVAL (offset) & 3) != 0))
661 return FALSE;
663 sym = XEXP (addr, 0);
666 if ((GET_CODE (sym) == SYMBOL_REF)
667 && CONSTANT_POOL_ADDRESS_P (sym))
668 return TRUE;
669 return FALSE;
674 constantpool_mem_p (op)
675 rtx op;
677 if (GET_CODE (op) == MEM)
678 return constantpool_address_p (XEXP (op, 0));
679 return FALSE;
/* NOTE(review): predicate accepting registers and (possibly SUBREG'd)
   memory with a valid address -- everything move_operand accepts except
   constants.  Kept byte-identical.  */
684 non_const_move_operand (op, mode)
685 rtx op;
686 enum machine_mode mode;
688 if (register_operand (op, mode))
689 return 1;
690 if (GET_CODE (op) == SUBREG)
691 op = SUBREG_REG (op);
692 if (GET_CODE (op) == MEM)
693 return memory_address_p (mode, XEXP (op, 0));
694 return FALSE;
698 /* Accept the floating point constant 1 in the appropriate mode. */
/* NOTE(review): lazily caches REAL_VALUE 1.0 for SFmode/DFmode on first
   call, then compares OP against it.  Kept byte-identical.  */
701 const_float_1_operand (op, mode)
702 rtx op;
703 enum machine_mode mode;
705 REAL_VALUE_TYPE d;
706 static REAL_VALUE_TYPE onedf;
707 static REAL_VALUE_TYPE onesf;
708 static int one_initialized;
710 if ((GET_CODE (op) != CONST_DOUBLE)
711 || (mode != GET_MODE (op))
712 || (mode != DFmode && mode != SFmode))
713 return FALSE;
715 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
717 if (! one_initialized)
719 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
720 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
721 one_initialized = TRUE;
724 if (mode == DFmode)
725 return REAL_VALUES_EQUAL (d, onedf);
726 else
727 return REAL_VALUES_EQUAL (d, onesf);
/* NOTE(review): true if OP is a constant offset valid for an SFmode
   memory access (used by floating-point reload patterns).  */
732 fpmem_offset_operand (op, mode)
733 rtx op;
734 enum machine_mode mode ATTRIBUTE_UNUSED;
736 if (GET_CODE (op) == CONST_INT)
737 return xtensa_mem_offset (INTVAL (op), SFmode);
738 return 0;
/* NOTE(review): emit a sign-extension of SRC into DST as a left shift
   followed by an arithmetic right shift of SImode values, using
   paradoxical subregs to widen the operands.  Kept byte-identical.  */
742 void
743 xtensa_extend_reg (dst, src)
744 rtx dst;
745 rtx src;
747 rtx temp = gen_reg_rtx (SImode);
748 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
750 /* generate paradoxical subregs as needed so that the modes match */
751 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
752 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
754 emit_insn (gen_ashlsi3 (temp, src, shift));
755 emit_insn (gen_ashrsi3 (dst, temp, shift));
/* NOTE(review): load constant SRC into DST via the constant pool;
   PC-relative loads are SImode, so SUBREGs are added when DST has a
   different mode.  */
759 void
760 xtensa_load_constant (dst, src)
761 rtx dst;
762 rtx src;
764 enum machine_mode mode = GET_MODE (dst);
765 src = force_const_mem (SImode, src);
767 /* PC-relative loads are always SImode so we have to add a SUBREG if that
768 is not the desired mode */
770 if (mode != SImode)
772 if (register_operand (dst, mode))
773 dst = simplify_gen_subreg (SImode, dst, mode, 0);
774 else
776 src = force_reg (SImode, src);
777 src = gen_lowpart_SUBREG (mode, src);
781 emit_move_insn (dst, src);
786 branch_operator (x, mode)
787 rtx x;
788 enum machine_mode mode;
790 if (GET_MODE (x) != mode)
791 return FALSE;
793 switch (GET_CODE (x))
795 case EQ:
796 case NE:
797 case LT:
798 case GE:
799 return TRUE;
800 default:
801 break;
803 return FALSE;
808 ubranch_operator (x, mode)
809 rtx x;
810 enum machine_mode mode;
812 if (GET_MODE (x) != mode)
813 return FALSE;
815 switch (GET_CODE (x))
817 case LTU:
818 case GEU:
819 return TRUE;
820 default:
821 break;
823 return FALSE;
828 boolean_operator (x, mode)
829 rtx x;
830 enum machine_mode mode;
832 if (GET_MODE (x) != mode)
833 return FALSE;
835 switch (GET_CODE (x))
837 case EQ:
838 case NE:
839 return TRUE;
840 default:
841 break;
843 return FALSE;
848 xtensa_mask_immediate (v)
849 int v;
851 #define MAX_MASK_SIZE 16
852 int mask_size;
854 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
856 if ((v & 1) == 0)
857 return FALSE;
858 v = v >> 1;
859 if (v == 0)
860 return TRUE;
863 return FALSE;
868 xtensa_mem_offset (v, mode)
869 unsigned v;
870 enum machine_mode mode;
872 switch (mode)
874 case BLKmode:
875 /* Handle the worst case for block moves. See xtensa_expand_block_move
876 where we emit an optimized block move operation if the block can be
877 moved in < "move_ratio" pieces. The worst case is when the block is
878 aligned but has a size of (3 mod 4) (does this happen?) so that the
879 last piece requires a byte load/store. */
880 return (xtensa_uimm8 (v) &&
881 xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
883 case QImode:
884 return xtensa_uimm8 (v);
886 case HImode:
887 return xtensa_uimm8x2 (v);
889 case DFmode:
890 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
892 default:
893 break;
896 return xtensa_uimm8x4 (v);
900 /* Make normal rtx_code into something we can index from an array */
902 static enum internal_test
903 map_test_to_internal_test (test_code)
904 enum rtx_code test_code;
906 enum internal_test test = ITEST_MAX;
908 switch (test_code)
910 default: break;
911 case EQ: test = ITEST_EQ; break;
912 case NE: test = ITEST_NE; break;
913 case GT: test = ITEST_GT; break;
914 case GE: test = ITEST_GE; break;
915 case LT: test = ITEST_LT; break;
916 case LE: test = ITEST_LE; break;
917 case GTU: test = ITEST_GTU; break;
918 case GEU: test = ITEST_GEU; break;
919 case LTU: test = ITEST_LTU; break;
920 case LEU: test = ITEST_LEU; break;
923 return test;
927 /* Generate the code to compare two integer values. The return value is
928 the comparison expression. */
/* NOTE(review): maps TEST_CODE onto the instruction set's native signed
   (EQ/NE/LT/GE) and unsigned (LTU/GEU) tests, forcing out-of-range
   constants into registers, adding 1 to turn LE into LT (etc.), swapping
   operands where needed, and reporting through *P_INVERT whether the
   branch must invert the result.  Kept byte-identical.  */
930 static rtx
931 gen_int_relational (test_code, cmp0, cmp1, p_invert)
932 enum rtx_code test_code; /* relational test (EQ, etc) */
933 rtx cmp0; /* first operand to compare */
934 rtx cmp1; /* second operand to compare */
935 int *p_invert; /* whether branch needs to reverse its test */
937 struct cmp_info {
938 enum rtx_code test_code; /* test code to use in insn */
939 int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
940 int const_add; /* constant to add (convert LE -> LT) */
941 int reverse_regs; /* reverse registers in test */
942 int invert_const; /* != 0 if invert value if cmp1 is constant */
943 int invert_reg; /* != 0 if invert value if cmp1 is register */
944 int unsignedp; /* != 0 for unsigned comparisons. */
947 static struct cmp_info info[ (int)ITEST_MAX ] = {
949 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
950 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
952 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
953 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
954 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
955 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
957 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
958 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
959 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
960 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
963 enum internal_test test;
964 enum machine_mode mode;
965 struct cmp_info *p_info;
967 test = map_test_to_internal_test (test_code);
968 if (test == ITEST_MAX)
969 abort ();
971 p_info = &info[ (int)test ];
973 mode = GET_MODE (cmp0);
974 if (mode == VOIDmode)
975 mode = GET_MODE (cmp1);
977 /* Make sure we can handle any constants given to us. */
978 if (GET_CODE (cmp1) == CONST_INT)
980 HOST_WIDE_INT value = INTVAL (cmp1);
981 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
983 /* if the immediate overflows or does not fit in the immediate field,
984 spill it to a register */
986 if ((p_info->unsignedp ?
987 (uvalue + p_info->const_add > uvalue) :
988 (value + p_info->const_add > value)) != (p_info->const_add > 0))
990 cmp1 = force_reg (mode, cmp1);
992 else if (!(p_info->const_range_p) (value + p_info->const_add))
994 cmp1 = force_reg (mode, cmp1);
997 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
999 cmp1 = force_reg (mode, cmp1);
1002 /* See if we need to invert the result. */
1003 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
1004 ? p_info->invert_const
1005 : p_info->invert_reg);
1007 /* Comparison to constants, may involve adding 1 to change a LT into LE.
1008 Comparison between two registers, may involve switching operands. */
1009 if (GET_CODE (cmp1) == CONST_INT)
1011 if (p_info->const_add != 0)
1012 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
1015 else if (p_info->reverse_regs)
1017 rtx temp = cmp0;
1018 cmp0 = cmp1;
1019 cmp1 = temp;
1022 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
1026 /* Generate the code to compare two float values. The return value is
1027 the comparison expression. */
/* NOTE(review): only EQ/LE/LT exist in hardware, so NE inverts EQ and
   GT/GE swap the operands of LT/LE; the boolean result lands in the FP
   condition-code register and an EQ/NE test of it is returned.
   Kept byte-identical.  */
1029 static rtx
1030 gen_float_relational (test_code, cmp0, cmp1)
1031 enum rtx_code test_code; /* relational test (EQ, etc) */
1032 rtx cmp0; /* first operand to compare */
1033 rtx cmp1; /* second operand to compare */
1035 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
1036 rtx brtmp;
1037 int reverse_regs, invert;
1039 switch (test_code)
1041 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1042 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1043 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1044 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1045 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1046 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1047 default:
1048 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1049 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
1052 if (reverse_regs)
1054 rtx temp = cmp0;
1055 cmp0 = cmp1;
1056 cmp1 = temp;
1059 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1060 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1062 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
/* NOTE(review): expand a conditional branch on the cached branch_cmp
   operands/branch_type, building an IF_THEN_ELSE jump; when the
   comparison had to be inverted, the branch targets are swapped instead.
   Kept byte-identical.  */
1066 void
1067 xtensa_expand_conditional_branch (operands, test_code)
1068 rtx *operands;
1069 enum rtx_code test_code;
1071 enum cmp_type type = branch_type;
1072 rtx cmp0 = branch_cmp[0];
1073 rtx cmp1 = branch_cmp[1];
1074 rtx cmp;
1075 int invert;
1076 rtx label1, label2;
1078 switch (type)
1080 case CMP_DF:
1081 default:
1082 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1084 case CMP_SI:
1085 invert = FALSE;
1086 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1087 break;
1089 case CMP_SF:
1090 if (!TARGET_HARD_FLOAT)
1091 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1092 invert = FALSE;
1093 cmp = gen_float_relational (test_code, cmp0, cmp1);
1094 break;
1097 /* Generate the branch. */
1099 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1100 label2 = pc_rtx;
1102 if (invert)
1104 label2 = label1;
1105 label1 = pc_rtx;
1108 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1109 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
1110 label1,
1111 label2)));
/* NOTE(review): turn the cached branch comparison into an operator usable
   by the conditional-move patterns: SImode comparisons are reduced to a
   compare-against-zero (emitting a subtract when needed), float
   comparisons go through gen_float_relational; returns 0 when no cmov
   form exists.  Kept byte-identical.  */
1115 static rtx
1116 gen_conditional_move (cmp)
1117 rtx cmp;
1119 enum rtx_code code = GET_CODE (cmp);
1120 rtx op0 = branch_cmp[0];
1121 rtx op1 = branch_cmp[1];
1123 if (branch_type == CMP_SI)
1125 /* Jump optimization calls get_condition() which canonicalizes
1126 comparisons like (GE x <const>) to (GT x <const-1>).
1127 Transform those comparisons back to GE, since that is the
1128 comparison supported in Xtensa. We shouldn't have to
1129 transform <LE x const> comparisons, because neither
1130 xtensa_expand_conditional_branch() nor get_condition() will
1131 produce them. */
1133 if ((code == GT) && (op1 == constm1_rtx))
1135 code = GE;
1136 op1 = const0_rtx;
1138 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1140 if (boolean_operator (cmp, VOIDmode))
1142 /* swap the operands to make const0 second */
1143 if (op0 == const0_rtx)
1145 op0 = op1;
1146 op1 = const0_rtx;
1149 /* if not comparing against zero, emit a comparison (subtract) */
1150 if (op1 != const0_rtx)
1152 op0 = expand_binop (SImode, sub_optab, op0, op1,
1153 0, 0, OPTAB_LIB_WIDEN);
1154 op1 = const0_rtx;
1157 else if (branch_operator (cmp, VOIDmode))
1159 /* swap the operands to make const0 second */
1160 if (op0 == const0_rtx)
1162 op0 = op1;
1163 op1 = const0_rtx;
1165 switch (code)
1167 case LT: code = GE; break;
1168 case GE: code = LT; break;
1169 default: abort ();
1173 if (op1 != const0_rtx)
1174 return 0;
1176 else
1177 return 0;
1179 return gen_rtx (code, VOIDmode, op0, op1);
1182 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1183 return gen_float_relational (code, op0, op1);
1185 return 0;
/* NOTE(review): expand a conditional move; returns 1 on success, 0 when
   gen_conditional_move cannot represent the comparison.  ISFLT selects
   the SFmode vs SImode movcc patterns.  Kept byte-identical.  */
1190 xtensa_expand_conditional_move (operands, isflt)
1191 rtx *operands;
1192 int isflt;
1194 rtx cmp;
1195 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1197 if (!(cmp = gen_conditional_move (operands[1])))
1198 return 0;
1200 if (isflt)
1201 gen_fn = (branch_type == CMP_SI
1202 ? gen_movsfcc_internal0
1203 : gen_movsfcc_internal1);
1204 else
1205 gen_fn = (branch_type == CMP_SI
1206 ? gen_movsicc_internal0
1207 : gen_movsicc_internal1);
1209 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1210 operands[2], operands[3], cmp));
1211 return 1;
/* NOTE(review): expand a store-condition-code operation as a conditional
   move selecting between constant 1 and 0 temporaries; returns 1 on
   success, 0 when the comparison has no cmov form.  Kept byte-identical.  */
1216 xtensa_expand_scc (operands)
1217 rtx *operands;
1219 rtx dest = operands[0];
1220 rtx cmp = operands[1];
1221 rtx one_tmp, zero_tmp;
1222 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1224 if (!(cmp = gen_conditional_move (cmp)))
1225 return 0;
1227 one_tmp = gen_reg_rtx (SImode);
1228 zero_tmp = gen_reg_rtx (SImode);
1229 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1230 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1232 gen_fn = (branch_type == CMP_SI
1233 ? gen_movsicc_internal0
1234 : gen_movsicc_internal1);
1235 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1236 return 1;
1240 /* Emit insns to move operands[1] into operands[0].
1242 Return 1 if we have written out everything that needs to be done to
1243 do the move. Otherwise, return 0 and the caller will emit the move
1244 normally. */
1247 xtensa_emit_move_sequence (operands, mode)
1248 rtx *operands;
1249 enum machine_mode mode;
1251 if (CONSTANT_P (operands[1])
1252 && GET_CODE (operands[1]) != CONSTANT_P_RTX
1253 && (GET_CODE (operands[1]) != CONST_INT
1254 || !xtensa_simm12b (INTVAL (operands[1]))))
1256 xtensa_load_constant (operands[0], operands[1]);
1257 return 1;
1260 if (!(reload_in_progress | reload_completed))
1262 if (!xtensa_valid_move (mode, operands))
1263 operands[1] = force_reg (mode, operands[1]);
1265 /* Check if this move is copying an incoming argument in a7. If
1266 so, emit the move, followed by the special "set_frame_ptr"
1267 unspec_volatile insn, at the very beginning of the function.
1268 This is necessary because the register allocator will ignore
1269 conflicts with a7 and may assign some other pseudo to a7. If
1270 that pseudo was assigned prior to this move, it would clobber
1271 the incoming argument in a7. By copying the argument out of
1272 a7 as the very first thing, and then immediately following
1273 that with an unspec_volatile to keep the scheduler away, we
1274 should avoid any problems. */
1276 if (a7_overlap_mentioned_p (operands[1]))
1278 rtx mov;
1279 switch (mode)
1281 case SImode:
1282 mov = gen_movsi_internal (operands[0], operands[1]);
1283 break;
1284 case HImode:
1285 mov = gen_movhi_internal (operands[0], operands[1]);
1286 break;
1287 case QImode:
1288 mov = gen_movqi_internal (operands[0], operands[1]);
1289 break;
1290 default:
1291 abort ();
1294 /* Insert the instructions before any other argument copies.
1295 (The set_frame_ptr insn comes _after_ the move, so push it
1296 out first.) */
1297 push_topmost_sequence ();
1298 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1299 emit_insn_after (mov, get_insns ());
1300 pop_topmost_sequence ();
1302 return 1;
1306 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1307 instruction won't be recognized after reload. So we remove the
1308 subreg and adjust mem accordingly. */
1309 if (reload_in_progress)
1311 operands[0] = fixup_subreg_mem (operands[0]);
1312 operands[1] = fixup_subreg_mem (operands[1]);
1314 return 0;
/* NOTE(review): rewrite (subreg (reg)) where the reg has been spilled to
   memory during reload into the equivalent adjusted MEM, so the insn is
   still recognizable after reload.  Kept byte-identical.  */
1317 static rtx
1318 fixup_subreg_mem (x)
1319 rtx x;
1321 if (GET_CODE (x) == SUBREG
1322 && GET_CODE (SUBREG_REG (x)) == REG
1323 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
1325 rtx temp =
1326 gen_rtx_SUBREG (GET_MODE (x),
1327 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1328 SUBREG_BYTE (x));
1329 x = alter_subreg (&temp);
1331 return x;
1335 /* Try to expand a block move operation to an RTL block move instruction.
1336 If not optimizing or if the block size is not a constant or if the
1337 block is small, the expansion fails and GCC falls back to calling
1338 memcpy().
1340 operands[0] is the destination
1341 operands[1] is the source
1342 operands[2] is the length
1343 operands[3] is the alignment */
/* NOTE(review): returns 1 when the move was emitted (or is empty), 0 to
   make the caller fall back to memcpy().  Kept byte-identical.  */
1346 xtensa_expand_block_move (operands)
1347 rtx *operands;
1349 rtx dest = operands[0];
1350 rtx src = operands[1];
1351 int bytes = INTVAL (operands[2]);
1352 int align = XINT (operands[3], 0);
1353 int num_pieces, move_ratio;
1355 /* If this is not a fixed size move, just call memcpy */
1356 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1357 return 0;
1359 /* Anything to move? */
1360 if (bytes <= 0)
1361 return 1;
1363 if (align > MOVE_MAX)
1364 align = MOVE_MAX;
1366 /* decide whether to expand inline based on the optimization level */
1367 move_ratio = 4;
1368 if (optimize > 2)
1369 move_ratio = LARGEST_MOVE_RATIO;
1370 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1371 if (num_pieces >= move_ratio)
1372 return 0;
1374 /* make sure the memory addresses are valid */
1375 operands[0] = validize_mem (dest);
1376 operands[1] = validize_mem (src);
1378 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1379 operands[2], operands[3]));
1380 return 1;
1384 /* Emit a sequence of instructions to implement a block move, trying
1385 to hide load delay slots as much as possible. Load N values into
1386 temporary registers, store those N values, and repeat until the
1387 complete block has been moved. N=delay_slots+1 */
1389 struct meminsnbuf {
1390 char template[30];
1391 rtx operands[2];
1394 void
1395 xtensa_emit_block_move (operands, tmpregs, delay_slots)
1396 rtx *operands;
1397 rtx *tmpregs;
1398 int delay_slots;
1400 rtx dest = operands[0];
1401 rtx src = operands[1];
1402 int bytes = INTVAL (operands[2]);
1403 int align = XINT (operands[3], 0);
1404 rtx from_addr = XEXP (src, 0);
1405 rtx to_addr = XEXP (dest, 0);
1406 int from_struct = MEM_IN_STRUCT_P (src);
1407 int to_struct = MEM_IN_STRUCT_P (dest);
1408 int offset = 0;
1409 int chunk_size, item_size;
1410 struct meminsnbuf *ldinsns, *stinsns;
1411 const char *ldname, *stname;
1412 enum machine_mode mode;
1414 if (align > MOVE_MAX)
1415 align = MOVE_MAX;
1416 item_size = align;
1417 chunk_size = delay_slots + 1;
1419 ldinsns = (struct meminsnbuf *)
1420 alloca (chunk_size * sizeof (struct meminsnbuf));
1421 stinsns = (struct meminsnbuf *)
1422 alloca (chunk_size * sizeof (struct meminsnbuf));
1424 mode = xtensa_find_mode_for_size (item_size);
1425 item_size = GET_MODE_SIZE (mode);
1426 ldname = xtensa_ld_opcodes[(int) mode];
1427 stname = xtensa_st_opcodes[(int) mode];
1429 while (bytes > 0)
1431 int n;
1433 for (n = 0; n < chunk_size; n++)
1435 rtx addr, mem;
1437 if (bytes == 0)
1439 chunk_size = n;
1440 break;
1443 if (bytes < item_size)
1445 /* find a smaller item_size which we can load & store */
1446 item_size = bytes;
1447 mode = xtensa_find_mode_for_size (item_size);
1448 item_size = GET_MODE_SIZE (mode);
1449 ldname = xtensa_ld_opcodes[(int) mode];
1450 stname = xtensa_st_opcodes[(int) mode];
1453 /* record the load instruction opcode and operands */
1454 addr = plus_constant (from_addr, offset);
1455 mem = gen_rtx_MEM (mode, addr);
1456 if (! memory_address_p (mode, addr))
1457 abort ();
1458 MEM_IN_STRUCT_P (mem) = from_struct;
1459 ldinsns[n].operands[0] = tmpregs[n];
1460 ldinsns[n].operands[1] = mem;
1461 sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1463 /* record the store instruction opcode and operands */
1464 addr = plus_constant (to_addr, offset);
1465 mem = gen_rtx_MEM (mode, addr);
1466 if (! memory_address_p (mode, addr))
1467 abort ();
1468 MEM_IN_STRUCT_P (mem) = to_struct;
1469 stinsns[n].operands[0] = tmpregs[n];
1470 stinsns[n].operands[1] = mem;
1471 sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1473 offset += item_size;
1474 bytes -= item_size;
1477 /* now output the loads followed by the stores */
1478 for (n = 0; n < chunk_size; n++)
1479 output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1480 for (n = 0; n < chunk_size; n++)
1481 output_asm_insn (stinsns[n].template, stinsns[n].operands);
1486 static enum machine_mode
1487 xtensa_find_mode_for_size (item_size)
1488 unsigned item_size;
1490 enum machine_mode mode, tmode;
1492 while (1)
1494 mode = VOIDmode;
1496 /* find mode closest to but not bigger than item_size */
1497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1498 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1499 if (GET_MODE_SIZE (tmode) <= item_size)
1500 mode = tmode;
1501 if (mode == VOIDmode)
1502 abort ();
1504 item_size = GET_MODE_SIZE (mode);
1506 if (xtensa_ld_opcodes[(int) mode]
1507 && xtensa_st_opcodes[(int) mode])
1508 break;
1510 /* cannot load & store this mode; try something smaller */
1511 item_size -= 1;
1514 return mode;
1518 void
1519 xtensa_expand_nonlocal_goto (operands)
1520 rtx *operands;
1522 rtx goto_handler = operands[1];
1523 rtx containing_fp = operands[3];
1525 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1526 is too big to generate in-line */
1528 if (GET_CODE (containing_fp) != REG)
1529 containing_fp = force_reg (Pmode, containing_fp);
1531 goto_handler = replace_rtx (copy_rtx (goto_handler),
1532 virtual_stack_vars_rtx,
1533 containing_fp);
1535 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1536 0, VOIDmode, 2,
1537 containing_fp, Pmode,
1538 goto_handler, Pmode);
1542 static void
1543 xtensa_init_machine_status (p)
1544 struct function *p;
1546 p->machine = (struct machine_function *)
1547 xcalloc (1, sizeof (struct machine_function));
1551 static void
1552 xtensa_free_machine_status (p)
1553 struct function *p;
1555 free (p->machine);
1556 p->machine = NULL;
1560 void
1561 xtensa_setup_frame_addresses ()
1563 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1564 cfun->machine->accesses_prev_frame = 1;
1566 emit_library_call
1567 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1568 0, VOIDmode, 0);
1572 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1573 a comment showing where the end of the loop is. However, if there is a
1574 label or a branch at the end of the loop then we need to place a nop
1575 there. If the loop ends with a label we need the nop so that branches
1576 targetting that label will target the nop (and thus remain in the loop),
1577 instead of targetting the instruction after the loop (and thus exiting
1578 the loop). If the loop ends with a branch, we need the nop in case the
1579 branch is targetting a location inside the loop. When the branch
1580 executes it will cause the loop count to be decremented even if it is
1581 taken (because it is the last instruction in the loop), so we need to
1582 nop after the branch to prevent the loop count from being decremented
1583 when the branch is taken. */
1585 void
1586 xtensa_emit_loop_end (insn, operands)
1587 rtx insn;
1588 rtx *operands;
1590 char done = 0;
1592 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1594 switch (GET_CODE (insn))
1596 case NOTE:
1597 case BARRIER:
1598 break;
1600 case CODE_LABEL:
1601 output_asm_insn ("nop.n", operands);
1602 done = 1;
1603 break;
1605 default:
1607 rtx body = PATTERN (insn);
1609 if (GET_CODE (body) == JUMP_INSN)
1611 output_asm_insn ("nop.n", operands);
1612 done = 1;
1614 else if ((GET_CODE (body) != USE)
1615 && (GET_CODE (body) != CLOBBER))
1616 done = 1;
1618 break;
1622 output_asm_insn ("# loop end for %0", operands);
1626 char *
1627 xtensa_emit_call (callop, operands)
1628 int callop;
1629 rtx *operands;
1631 char *result = (char *) malloc (64);
1632 rtx tgt = operands[callop];
1634 if (GET_CODE (tgt) == CONST_INT)
1635 sprintf (result, "call8\t0x%x", INTVAL (tgt));
1636 else if (register_operand (tgt, VOIDmode))
1637 sprintf (result, "callx8\t%%%d", callop);
1638 else
1639 sprintf (result, "call8\t%%%d", callop);
1641 return result;
1645 /* Return the stabs register number to use for 'regno'. */
1648 xtensa_dbx_register_number (regno)
1649 int regno;
1651 int first = -1;
1653 if (GP_REG_P (regno)) {
1654 regno -= GP_REG_FIRST;
1655 first = 0;
1657 else if (BR_REG_P (regno)) {
1658 regno -= BR_REG_FIRST;
1659 first = 16;
1661 else if (FP_REG_P (regno)) {
1662 regno -= FP_REG_FIRST;
1663 /* The current numbering convention is that TIE registers are
1664 numbered in libcc order beginning with 256. We can't guarantee
1665 that the FP registers will come first, so the following is just
1666 a guess. It seems like we should make a special case for FP
1667 registers and give them fixed numbers < 256. */
1668 first = 256;
1670 else if (ACC_REG_P (regno))
1672 first = 0;
1673 regno = -1;
1676 /* When optimizing, we sometimes get asked about pseudo-registers
1677 that don't represent hard registers. Return 0 for these. */
1678 if (first == -1)
1679 return 0;
1681 return first + regno;
1685 /* Argument support functions. */
1687 /* Initialize CUMULATIVE_ARGS for a function. */
1689 void
1690 init_cumulative_args (cum, fntype, libname)
1691 CUMULATIVE_ARGS *cum; /* argument info to initialize */
1692 tree fntype ATTRIBUTE_UNUSED; /* tree ptr for function decl */
1693 rtx libname ATTRIBUTE_UNUSED; /* SYMBOL_REF of library name or 0 */
1695 cum->arg_words = 0;
1698 /* Advance the argument to the next argument position. */
1700 void
1701 function_arg_advance (cum, mode, type)
1702 CUMULATIVE_ARGS *cum; /* current arg information */
1703 enum machine_mode mode; /* current arg mode */
1704 tree type; /* type of the argument or 0 if lib support */
1706 int words, max;
1707 int *arg_words;
1709 arg_words = &cum->arg_words;
1710 max = MAX_ARGS_IN_REGISTERS;
1712 words = (((mode != BLKmode)
1713 ? (int) GET_MODE_SIZE (mode)
1714 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1716 if ((*arg_words + words > max) && (*arg_words < max))
1717 *arg_words = max;
1719 *arg_words += words;
1723 /* Return an RTL expression containing the register for the given mode,
1724 or 0 if the argument is to be passed on the stack. */
1727 function_arg (cum, mode, type, incoming_p)
1728 CUMULATIVE_ARGS *cum; /* current arg information */
1729 enum machine_mode mode; /* current arg mode */
1730 tree type; /* type of the argument or 0 if lib support */
1731 int incoming_p; /* computing the incoming registers? */
1733 int regbase, words, max;
1734 int *arg_words;
1735 int regno;
1736 enum machine_mode result_mode;
1738 arg_words = &cum->arg_words;
1739 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1740 max = MAX_ARGS_IN_REGISTERS;
1742 words = (((mode != BLKmode)
1743 ? (int) GET_MODE_SIZE (mode)
1744 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1746 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1747 *arg_words += (*arg_words & 1);
1749 if (*arg_words + words > max)
1750 return (rtx)0;
1752 regno = regbase + *arg_words;
1753 result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);
1755 /* We need to make sure that references to a7 are represented with
1756 rtx that is not equal to hard_frame_pointer_rtx. For BLKmode and
1757 modes bigger than 2 words (because we only have patterns for
1758 modes of 2 words or smaller), we can't control the expansion
1759 unless we explicitly list the individual registers in a PARALLEL. */
1761 if ((mode == BLKmode || words > 2)
1762 && regno < A7_REG
1763 && regno + words > A7_REG)
1765 rtx result;
1766 int n;
1768 result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
1769 for (n = 0; n < words; n++)
1771 XVECEXP (result, 0, n) =
1772 gen_rtx_EXPR_LIST (VOIDmode,
1773 gen_raw_REG (SImode, regno + n),
1774 GEN_INT (n * UNITS_PER_WORD));
1776 return result;
1779 return gen_raw_REG (result_mode, regno);
1783 void
1784 override_options ()
1786 int regno;
1787 enum machine_mode mode;
1789 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1790 error ("boolean registers required for the floating-point option");
1792 /* set up the tables of ld/st opcode names for block moves */
1793 xtensa_ld_opcodes[(int) SImode] = "l32i";
1794 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1795 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1796 xtensa_st_opcodes[(int) SImode] = "s32i";
1797 xtensa_st_opcodes[(int) HImode] = "s16i";
1798 xtensa_st_opcodes[(int) QImode] = "s8i";
1800 xtensa_char_to_class['q'] = SP_REG;
1801 xtensa_char_to_class['a'] = GR_REGS;
1802 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1803 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1804 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1805 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1806 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1807 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1808 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1810 /* Set up array giving whether a given register can hold a given mode. */
1811 for (mode = VOIDmode;
1812 mode != MAX_MACHINE_MODE;
1813 mode = (enum machine_mode) ((int) mode + 1))
1815 int size = GET_MODE_SIZE (mode);
1816 enum mode_class class = GET_MODE_CLASS (mode);
1818 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1820 int temp;
1822 if (ACC_REG_P (regno))
1823 temp = (TARGET_MAC16 &&
1824 (class == MODE_INT) && (size <= UNITS_PER_WORD));
1825 else if (GP_REG_P (regno))
1826 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1827 else if (FP_REG_P (regno))
1828 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1829 else if (BR_REG_P (regno))
1830 temp = (TARGET_BOOLEANS && (mode == CCmode));
1831 else
1832 temp = FALSE;
1834 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1838 init_machine_status = xtensa_init_machine_status;
1839 free_machine_status = xtensa_free_machine_status;
1841 /* Check PIC settings. There's no need for -fPIC on Xtensa and
1842 some targets need to always use PIC. */
1843 if (XTENSA_ALWAYS_PIC)
1845 if (flag_pic)
1846 warning ("-f%s ignored (all code is position independent)",
1847 (flag_pic > 1 ? "PIC" : "pic"));
1848 flag_pic = 1;
1850 if (flag_pic > 1)
1851 flag_pic = 1;
/* A C compound statement to output to stdio stream STREAM the
   assembler syntax for an instruction operand X.  X is an RTL
   expression.

   CODE is a value that can be used to specify one of several ways
   of printing the operand.  It is used when identical operands
   must be printed differently depending on the context.  CODE
   comes from the '%' specification that was used to request
   printing of the operand.  If the specification was just '%DIGIT'
   then CODE is 0; if the specification was '%LTR DIGIT' then CODE
   is the ASCII code for LTR.

   If X is a register, this macro should print the register's name.
   The names can be found in an array 'reg_names' whose type is
   'char *[]'.  'reg_names' is initialized from 'REGISTER_NAMES'.

   When the machine description has a specification '%PUNCT' (a '%'
   followed by a punctuation character), this macro is called with
   a null pointer for X and the punctuation character for CODE.

   'a', 'c', 'l', and 'n' are reserved.

   The Xtensa specific codes are:

   'd'  CONST_INT, print as signed decimal
   'x'  CONST_INT, print as signed hexadecimal
   'K'  CONST_INT, print number of bits in mask for EXTUI
   'R'  CONST_INT, print (X & 0x1f)
   'L'  CONST_INT, print ((32 - X) & 0x1f)
   'D'  REG, print second register of double-word register operand
   'N'  MEM, print address of next word following a memory operand
   'v'  MEM, if memory reference is volatile, output a MEMW before it
*/
/* Print a hexadecimal value in a nice way: single-digit magnitudes in
   decimal, everything else in hex, with an explicit minus sign for
   negative values.  The negation is performed in unsigned arithmetic
   because -val overflows (undefined behavior) when val == INT_MIN.  */

static void
printx (file, val)
     FILE *file;
     signed int val;
{
  if (val > -0xa && val < 0xa)
    fprintf (file, "%d", val);
  else if (val < 0)
    fprintf (file, "-0x%x", -(unsigned int) val);
  else
    fprintf (file, "0x%x", val);
}
1904 void
1905 print_operand (file, op, letter)
1906 FILE *file; /* file to write to */
1907 rtx op; /* operand to print */
1908 int letter; /* %<letter> or 0 */
1910 enum rtx_code code;
1912 if (! op)
1913 error ("PRINT_OPERAND null pointer");
1915 code = GET_CODE (op);
1916 switch (code)
1918 case REG:
1919 case SUBREG:
1921 int regnum = xt_true_regnum (op);
1922 if (letter == 'D')
1923 regnum++;
1924 fprintf (file, "%s", reg_names[regnum]);
1925 break;
1928 case MEM:
1929 /* For a volatile memory reference, emit a MEMW before the
1930 load or store. */
1931 if (letter == 'v')
1933 if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
1934 fprintf (file, "memw\n\t");
1935 break;
1937 else if (letter == 'N')
1939 enum machine_mode mode;
1940 switch (GET_MODE (op))
1942 case DFmode: mode = SFmode; break;
1943 case DImode: mode = SImode; break;
1944 default: abort ();
1946 op = adjust_address (op, mode, 4);
1949 output_address (XEXP (op, 0));
1950 break;
1952 case CONST_INT:
1953 switch (letter)
1955 case 'K':
1957 int num_bits = 0;
1958 unsigned val = INTVAL (op);
1959 while (val & 1)
1961 num_bits += 1;
1962 val = val >> 1;
1964 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
1965 fatal_insn ("invalid mask", op);
1967 fprintf (file, "%d", num_bits);
1968 break;
1971 case 'L':
1972 fprintf (file, "%d", (32 - INTVAL (op)) & 0x1f);
1973 break;
1975 case 'R':
1976 fprintf (file, "%d", INTVAL (op) & 0x1f);
1977 break;
1979 case 'x':
1980 printx (file, INTVAL (op));
1981 break;
1983 case 'd':
1984 default:
1985 fprintf (file, "%d", INTVAL (op));
1986 break;
1989 break;
1991 default:
1992 output_addr_const (file, op);
1997 /* A C compound statement to output to stdio stream STREAM the
1998 assembler syntax for an instruction operand that is a memory
1999 reference whose address is ADDR. ADDR is an RTL expression.
2001 On some machines, the syntax for a symbolic address depends on
2002 the section that the address refers to. On these machines,
2003 define the macro 'ENCODE_SECTION_INFO' to store the information
2004 into the 'symbol_ref', and then check for it here. */
2006 void
2007 print_operand_address (file, addr)
2008 FILE *file;
2009 rtx addr;
2011 if (!addr)
2012 error ("PRINT_OPERAND_ADDRESS, null pointer");
2014 switch (GET_CODE (addr))
2016 default:
2017 fatal_insn ("invalid address", addr);
2018 break;
2020 case REG:
2021 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2022 break;
2024 case PLUS:
2026 rtx reg = (rtx)0;
2027 rtx offset = (rtx)0;
2028 rtx arg0 = XEXP (addr, 0);
2029 rtx arg1 = XEXP (addr, 1);
2031 if (GET_CODE (arg0) == REG)
2033 reg = arg0;
2034 offset = arg1;
2036 else if (GET_CODE (arg1) == REG)
2038 reg = arg1;
2039 offset = arg0;
2041 else
2042 fatal_insn ("no register in address", addr);
2044 if (CONSTANT_P (offset))
2046 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2047 output_addr_const (file, offset);
2049 else
2050 fatal_insn ("address offset not a constant", addr);
2052 break;
2054 case LABEL_REF:
2055 case SYMBOL_REF:
2056 case CONST_INT:
2057 case CONST:
2058 output_addr_const (file, addr);
2059 break;
2064 /* Emit either a label, .comm, or .lcomm directive. */
2066 void
2067 xtensa_declare_object (file, name, init_string, final_string, size)
2068 FILE *file;
2069 char *name;
2070 char *init_string;
2071 char *final_string;
2072 int size;
2074 fputs (init_string, file); /* "", "\t.comm\t", or "\t.lcomm\t" */
2075 assemble_name (file, name);
2076 fprintf (file, final_string, size); /* ":\n", ",%u\n", ",%u\n" */
2080 void
2081 xtensa_output_literal (file, x, mode, labelno)
2082 FILE *file;
2083 rtx x;
2084 enum machine_mode mode;
2085 int labelno;
2087 long value_long[2];
2088 REAL_VALUE_TYPE r;
2089 int size;
2091 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2093 switch (GET_MODE_CLASS (mode))
2095 case MODE_FLOAT:
2096 if (GET_CODE (x) != CONST_DOUBLE)
2097 abort ();
2099 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2100 switch (mode)
2102 case SFmode:
2103 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2104 fprintf (file, "0x%08lx\n", value_long[0]);
2105 break;
2107 case DFmode:
2108 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2109 fprintf (file, "0x%08lx, 0x%08lx\n",
2110 value_long[0], value_long[1]);
2111 break;
2113 default:
2114 abort ();
2117 break;
2119 case MODE_INT:
2120 case MODE_PARTIAL_INT:
2121 size = GET_MODE_SIZE (mode);
2122 if (size == 4)
2124 output_addr_const (file, x);
2125 fputs ("\n", file);
2127 else if (size == 8)
2129 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2130 fputs (", ", file);
2131 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2132 fputs ("\n", file);
2134 else
2135 abort ();
2136 break;
2138 default:
2139 abort ();
2144 /* Return the bytes needed to compute the frame pointer from the current
2145 stack pointer. */
2147 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2148 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
2150 long
2151 compute_frame_size (size)
2152 int size; /* # of var. bytes allocated */
2154 /* add space for the incoming static chain value */
2155 if (current_function_needs_context)
2156 size += (1 * UNITS_PER_WORD);
2158 xtensa_current_frame_size =
2159 XTENSA_STACK_ALIGN (size
2160 + current_function_outgoing_args_size
2161 + (WINDOW_SIZE * UNITS_PER_WORD));
2162 return xtensa_current_frame_size;
2167 xtensa_frame_pointer_required ()
2169 /* The code to expand builtin_frame_addr and builtin_return_addr
2170 currently uses the hard_frame_pointer instead of frame_pointer.
2171 This seems wrong but maybe it's necessary for other architectures.
2172 This function is derived from the i386 code. */
2174 if (cfun->machine->accesses_prev_frame)
2175 return 1;
2177 return 0;
2181 void
2182 xtensa_reorg (first)
2183 rtx first;
2185 rtx insn, set_frame_ptr_insn = 0;
2187 unsigned long tsize = compute_frame_size (get_frame_size ());
2188 if (tsize < (1 << (12+3)))
2189 frame_size_const = 0;
2190 else
2192 frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));;
2194 /* make sure the constant is used so it doesn't get eliminated
2195 from the constant pool */
2196 emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
2199 if (!frame_pointer_needed)
2200 return;
2202 /* Search all instructions, looking for the insn that sets up the
2203 frame pointer. This search will fail if the function does not
2204 have an incoming argument in $a7, but in that case, we can just
2205 set up the frame pointer at the very beginning of the
2206 function. */
2208 for (insn = first; insn; insn = NEXT_INSN (insn))
2210 rtx pat;
2212 if (!INSN_P (insn))
2213 continue;
2215 pat = PATTERN (insn);
2216 if (GET_CODE (pat) == UNSPEC_VOLATILE
2217 && (XINT (pat, 1) == UNSPECV_SET_FP))
2219 set_frame_ptr_insn = insn;
2220 break;
2224 if (set_frame_ptr_insn)
2226 /* for all instructions prior to set_frame_ptr_insn, replace
2227 hard_frame_pointer references with stack_pointer */
2228 for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
2230 if (INSN_P (insn))
2231 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2232 hard_frame_pointer_rtx,
2233 stack_pointer_rtx);
2236 else
2238 /* emit the frame pointer move immediately after the NOTE that starts
2239 the function */
2240 emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
2241 stack_pointer_rtx), first);
2246 /* Set up the stack and frame (if desired) for the function. */
2248 void
2249 xtensa_function_prologue (file, size)
2250 FILE *file;
2251 int size ATTRIBUTE_UNUSED;
2253 unsigned long tsize = compute_frame_size (get_frame_size ());
2255 if (frame_pointer_needed)
2256 fprintf (file, "\t.frame\ta7, %ld\n", tsize);
2257 else
2258 fprintf (file, "\t.frame\tsp, %ld\n", tsize);
2261 if (tsize < (1 << (12+3)))
2263 fprintf (file, "\tentry\tsp, %ld\n", tsize);
2265 else
2267 fprintf (file, "\tentry\tsp, 16\n");
2269 /* use a8 as a temporary since a0-a7 may be live */
2270 fprintf (file, "\tl32r\ta8, ");
2271 print_operand (file, frame_size_const, 0);
2272 fprintf (file, "\n\tsub\ta8, sp, a8\n");
2273 fprintf (file, "\tmovsp\tsp, a8\n");
2278 /* Do any necessary cleanup after a function to restore
2279 stack, frame, and regs. */
2281 void
2282 xtensa_function_epilogue (file, size)
2283 FILE *file;
2284 int size ATTRIBUTE_UNUSED;
2286 rtx insn = get_last_insn ();
2287 /* If the last insn was a BARRIER, we don't have to write anything. */
2288 if (GET_CODE (insn) == NOTE)
2289 insn = prev_nonnote_insn (insn);
2290 if (insn == 0 || GET_CODE (insn) != BARRIER)
2291 fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");
2293 xtensa_current_frame_size = 0;
2297 /* Create the va_list data type.
2298 This structure is set up by __builtin_saveregs. The __va_reg
2299 field points to a stack-allocated region holding the contents of the
2300 incoming argument registers. The __va_ndx field is an index initialized
2301 to the position of the first unnamed (variable) argument. This same index
2302 is also used to address the arguments passed in memory. Thus, the
2303 __va_stk field is initialized to point to the position of the first
2304 argument in memory offset to account for the arguments passed in
2305 registers. E.G., if there are 6 argument registers, and each register is
2306 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2307 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2308 argument word N for N >= 6. */
2310 tree
2311 xtensa_build_va_list (void)
2313 tree f_stk, f_reg, f_ndx, record;
2315 record = make_node (RECORD_TYPE);
2317 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2318 ptr_type_node);
2319 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2320 ptr_type_node);
2321 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2322 integer_type_node);
2324 DECL_FIELD_CONTEXT (f_stk) = record;
2325 DECL_FIELD_CONTEXT (f_reg) = record;
2326 DECL_FIELD_CONTEXT (f_ndx) = record;
2328 TYPE_FIELDS (record) = f_stk;
2329 TREE_CHAIN (f_stk) = f_reg;
2330 TREE_CHAIN (f_reg) = f_ndx;
2332 layout_type (record);
2333 return record;
2337 /* Save the incoming argument registers on the stack. Returns the
2338 address of the saved registers. */
2341 xtensa_builtin_saveregs ()
2343 rtx gp_regs, dest;
2344 int arg_words = current_function_arg_words;
2345 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2346 int i;
2348 if (gp_left == 0)
2349 return const0_rtx;
2351 /* allocate the general-purpose register space */
2352 gp_regs = assign_stack_local
2353 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2354 MEM_IN_STRUCT_P (gp_regs) = 1;
2355 RTX_UNCHANGING_P (gp_regs) = 1;
2356 RTX_UNCHANGING_P (XEXP (gp_regs, 0)) = 1;
2358 /* Now store the incoming registers. */
2359 dest = change_address (gp_regs, SImode,
2360 plus_constant (XEXP (gp_regs, 0),
2361 arg_words * UNITS_PER_WORD));
2363 /* Note: Don't use move_block_from_reg() here because the incoming
2364 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2365 Instead, call gen_raw_REG() directly so that we get a distinct
2366 instance of (REG:SI 7). */
2367 for (i = 0; i < gp_left; i++)
2369 emit_move_insn (operand_subword (dest, i, 1, BLKmode),
2370 gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
2373 return XEXP (gp_regs, 0);
2377 /* Implement `va_start' for varargs and stdarg. We look at the
2378 current function to fill in an initial va_list. */
2380 void
2381 xtensa_va_start (stdarg_p, valist, nextarg)
2382 int stdarg_p ATTRIBUTE_UNUSED;
2383 tree valist;
2384 rtx nextarg ATTRIBUTE_UNUSED;
2386 tree f_stk, stk;
2387 tree f_reg, reg;
2388 tree f_ndx, ndx;
2389 tree t, u;
2390 int arg_words;
2392 arg_words = current_function_args_info.arg_words;
2394 f_stk = TYPE_FIELDS (va_list_type_node);
2395 f_reg = TREE_CHAIN (f_stk);
2396 f_ndx = TREE_CHAIN (f_reg);
2398 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2399 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2400 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2402 /* Call __builtin_saveregs; save the result in __va_reg */
2403 current_function_arg_words = arg_words;
2404 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2405 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2406 TREE_SIDE_EFFECTS (t) = 1;
2407 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2409 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2410 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2411 u = fold (build (PLUS_EXPR, ptr_type_node, u,
2412 build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
2413 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2414 TREE_SIDE_EFFECTS (t) = 1;
2415 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2417 /* Set the __va_ndx member. */
2418 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2419 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2420 TREE_SIDE_EFFECTS (t) = 1;
2421 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2425 /* Implement `va_arg'. */
2428 xtensa_va_arg (valist, type)
2429 tree valist, type;
2431 tree f_stk, stk;
2432 tree f_reg, reg;
2433 tree f_ndx, ndx;
2434 tree tmp, addr_tree, type_size;
2435 rtx array, orig_ndx, r, addr, size, va_size;
2436 rtx lab_false, lab_over, lab_false2;
2438 f_stk = TYPE_FIELDS (va_list_type_node);
2439 f_reg = TREE_CHAIN (f_stk);
2440 f_ndx = TREE_CHAIN (f_reg);
2442 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2443 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2444 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2446 type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
2448 va_size = gen_reg_rtx (SImode);
2449 tmp = fold (build (MULT_EXPR, sizetype,
2450 fold (build (TRUNC_DIV_EXPR, sizetype,
2451 fold (build (PLUS_EXPR, sizetype,
2452 type_size,
2453 size_int (UNITS_PER_WORD - 1))),
2454 size_int (UNITS_PER_WORD))),
2455 size_int (UNITS_PER_WORD)));
2456 r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
2457 if (r != va_size)
2458 emit_move_insn (va_size, r);
2461 /* First align __va_ndx to a double word boundary if necessary for this arg:
2463 if (__alignof__ (TYPE) > 4)
2464 (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
2467 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2469 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2470 build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
2471 tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2472 build_int_2 (-2 * UNITS_PER_WORD, -1));
2473 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2474 TREE_SIDE_EFFECTS (tmp) = 1;
2475 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2479 /* Increment __va_ndx to point past the argument:
2481 orig_ndx = (AP).__va_ndx;
2482 (AP).__va_ndx += __va_size (TYPE);
2485 orig_ndx = gen_reg_rtx (SImode);
2486 r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2487 if (r != orig_ndx)
2488 emit_move_insn (orig_ndx, r);
2490 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2491 make_tree (intSI_type_node, va_size));
2492 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2493 TREE_SIDE_EFFECTS (tmp) = 1;
2494 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2497 /* Check if the argument is in registers:
2499 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
2500 __array = (AP).__va_reg;
2503 lab_false = gen_label_rtx ();
2504 lab_over = gen_label_rtx ();
2505 array = gen_reg_rtx (Pmode);
2507 emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode, EXPAND_NORMAL),
2508 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2509 GT, const1_rtx, SImode, 0, lab_false);
2511 r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2512 if (r != array)
2513 emit_move_insn (array, r);
2515 emit_jump_insn (gen_jump (lab_over));
2516 emit_barrier ();
2517 emit_label (lab_false);
2520 /* ...otherwise, the argument is on the stack (never split between
2521 registers and the stack -- change __va_ndx if necessary):
2523 else
2525 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2526 (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2527 __array = (AP).__va_stk;
2531 lab_false2 = gen_label_rtx ();
2532 emit_cmp_and_jump_insns (orig_ndx,
2533 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2534 GE, const1_rtx, SImode, 0, lab_false2);
2536 tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
2537 build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
2538 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2539 TREE_SIDE_EFFECTS (tmp) = 1;
2540 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2542 emit_label (lab_false2);
2544 r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2545 if (r != array)
2546 emit_move_insn (array, r);
2548 emit_label (lab_over);
2551 /* Given the base array pointer (__array) and index to the subsequent
2552 argument (__va_ndx), find the address:
2554 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2555 ? sizeof (TYPE)
2556 : __va_size (TYPE))
2558 The results are endian-dependent because values smaller than one word
2559 are aligned differently.
2562 size = gen_reg_rtx (SImode);
2563 emit_move_insn (size, va_size);
2565 if (BYTES_BIG_ENDIAN)
2567 rtx lab_use_va_size = gen_label_rtx ();
2569 emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
2570 EXPAND_NORMAL),
2571 GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
2572 GE, const1_rtx, SImode, 0, lab_use_va_size);
2574 r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
2575 if (r != size)
2576 emit_move_insn (size, r);
2578 emit_label (lab_use_va_size);
2581 addr_tree = build (PLUS_EXPR, ptr_type_node,
2582 make_tree (ptr_type_node, array),
2583 ndx);
2584 addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
2585 make_tree (intSI_type_node, size));
2586 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2587 addr = copy_to_reg (addr);
2588 return addr;
2592 enum reg_class
2593 xtensa_preferred_reload_class (x, class)
2594 rtx x;
2595 enum reg_class class;
2597 if (CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2598 return NO_REGS;
2600 /* Don't use sp for reloads! */
2601 if (class == AR_REGS)
2602 return GR_REGS;
2604 return class;
2608 enum reg_class
2609 xtensa_secondary_reload_class (class, mode, x, isoutput)
2610 enum reg_class class;
2611 enum machine_mode mode ATTRIBUTE_UNUSED;
2612 rtx x;
2613 int isoutput;
2615 int regno;
2617 if (GET_CODE (x) == SIGN_EXTEND)
2618 x = XEXP (x, 0);
2619 regno = xt_true_regnum (x);
2621 if (!isoutput)
2623 if (class == FP_REGS && constantpool_mem_p (x))
2624 return GR_REGS;
2627 if (ACC_REG_P (regno))
2628 return (class == GR_REGS ? NO_REGS : GR_REGS);
2629 if (class == ACC_REG)
2630 return (GP_REG_P (regno) ? NO_REGS : GR_REGS);
2632 return NO_REGS;
2636 void
2637 order_regs_for_local_alloc ()
2639 if (!leaf_function_p ())
2641 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2642 FIRST_PSEUDO_REGISTER * sizeof (int));
2644 else
2646 int i, num_arg_regs;
2647 int nxt = 0;
2649 /* use the AR registers in increasing order (skipping a0 and a1)
2650 but save the incoming argument registers for a last resort */
2651 num_arg_regs = current_function_args_info.arg_words;
2652 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2653 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2654 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2655 reg_alloc_order[nxt++] = i + num_arg_regs;
2656 for (i = 0; i < num_arg_regs; i++)
2657 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2659 /* list the FP registers in order for now */
2660 for (i = 0; i < 16; i++)
2661 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2663 /* GCC requires that we list *all* the registers.... */
2664 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2665 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2666 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2667 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2669 /* list the coprocessor registers in order */
2670 for (i = 0; i < BR_REG_NUM; i++)
2671 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2673 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2678 /* A customized version of reg_overlap_mentioned_p that only looks for
2679 references to a7 (as opposed to hard_frame_pointer_rtx). */
2682 a7_overlap_mentioned_p (x)
2683 rtx x;
2685 int i, j;
2686 unsigned int x_regno;
2687 const char *fmt;
2689 if (GET_CODE (x) == REG)
2691 x_regno = REGNO (x);
2692 return (x != hard_frame_pointer_rtx
2693 && x_regno < A7_REG + 1
2694 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2697 if (GET_CODE (x) == SUBREG
2698 && GET_CODE (SUBREG_REG (x)) == REG
2699 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2701 x_regno = subreg_regno (x);
2702 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2703 && x_regno < A7_REG + 1
2704 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2707 /* X does not match, so try its subexpressions. */
2708 fmt = GET_RTX_FORMAT (GET_CODE (x));
2709 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2711 if (fmt[i] == 'e')
2713 if (a7_overlap_mentioned_p (XEXP (x, i)))
2714 return 1;
2716 else if (fmt[i] == 'E')
2718 for (j = XVECLEN (x, i) - 1; j >=0; j--)
2719 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2720 return 1;
2724 return 0;