* config/xtensa/xtensa.md (set_frame_ptr): Change rtl to set reg a7.
[official-gcc.git] gcc/config/xtensa/xtensa.c
1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright 2001,2002 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "insn-attr.h"
35 #include "insn-codes.h"
36 #include "recog.h"
37 #include "output.h"
38 #include "tree.h"
39 #include "expr.h"
40 #include "flags.h"
41 #include "reload.h"
42 #include "tm_p.h"
43 #include "function.h"
44 #include "toplev.h"
45 #include "optabs.h"
46 #include "output.h"
47 #include "libfuncs.h"
48 #include "ggc.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 /* Enumeration for all of the relational tests, so that we can build
54 arrays indexed by the test type, and not worry about the order
55 of EQ, NE, etc. */
57 enum internal_test {
58 ITEST_EQ,
59 ITEST_NE,
60 ITEST_GT,
61 ITEST_GE,
62 ITEST_LT,
63 ITEST_LE,
64 ITEST_GTU,
65 ITEST_GEU,
66 ITEST_LTU,
67 ITEST_LEU,
68 ITEST_MAX
71 /* Cached operands, and operator to compare for use in set/branch on
72 condition codes. */
73 rtx branch_cmp[2];
75 /* what type of branch to use */
76 enum cmp_type branch_type;
78 /* Array giving truth value on whether or not a given hard register
79 can support a given mode. */
80 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
82 /* Current frame size calculated by compute_frame_size. */
83 unsigned xtensa_current_frame_size;
85 /* Tables of ld/st opcode names for block moves */
86 const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
87 const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
88 #define LARGEST_MOVE_RATIO 15
90 /* Define the structure for the machine field in struct function. */
91 struct machine_function GTY(())
93 int accesses_prev_frame;
94 bool incoming_a7_copied;
97 /* Vector, indexed by hard register number, which contains 1 for a
98 register that is allowable in a candidate for leaf function
99 treatment. */
101 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
103 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
104 1, 1, 1,
105 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
109 /* Map hard register number to register class */
110 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
112 RL_REGS, SP_REG, RL_REGS, RL_REGS,
113 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
114 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
115 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
116 AR_REGS, AR_REGS, BR_REGS,
117 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
118 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
119 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
120 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
121 ACC_REG,
124 /* Map register constraint character to register class. */
125 enum reg_class xtensa_char_to_class[256] =
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
189 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
190 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
193 static int b4const_or_zero PARAMS ((int));
194 static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
195 static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
196 static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
197 static rtx gen_conditional_move PARAMS ((rtx));
198 static rtx fixup_subreg_mem PARAMS ((rtx x));
199 static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
200 static struct machine_function * xtensa_init_machine_status PARAMS ((void));
201 static void printx PARAMS ((FILE *, signed int));
202 static unsigned int xtensa_multibss_section_type_flags
203 PARAMS ((tree, const char *, int));
204 static void xtensa_select_rtx_section
205 PARAMS ((enum machine_mode, rtx, unsigned HOST_WIDE_INT));
206 static void xtensa_encode_section_info PARAMS ((tree, int));
207 static bool xtensa_rtx_costs PARAMS ((rtx, int, int, int *));
209 static rtx frame_size_const;
210 static int current_function_arg_words;
211 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
212 REG_ALLOC_ORDER;
214 /* This macro generates the assembly code for function entry.
215 FILE is a stdio stream to output the code to.
216 SIZE is an int: how many units of temporary storage to allocate.
217 Refer to the array 'regs_ever_live' to determine which registers
218 to save; 'regs_ever_live[I]' is nonzero if register number I
219 is ever used in the function. This macro is responsible for
220 knowing which registers should not be saved even if used. */
222 #undef TARGET_ASM_FUNCTION_PROLOGUE
223 #define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
225 /* This macro generates the assembly code for function exit,
226 on machines that need it. If FUNCTION_EPILOGUE is not defined
227 then individual return instructions are generated for each
228 return statement. Args are same as for FUNCTION_PROLOGUE. */
230 #undef TARGET_ASM_FUNCTION_EPILOGUE
231 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
233 /* These hooks specify assembly directives for creating certain kinds
234 of integer object. */
236 #undef TARGET_ASM_ALIGNED_SI_OP
237 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
239 #undef TARGET_ASM_SELECT_RTX_SECTION
240 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
241 #undef TARGET_ENCODE_SECTION_INFO
242 #define TARGET_ENCODE_SECTION_INFO xtensa_encode_section_info
244 #undef TARGET_RTX_COSTS
245 #define TARGET_RTX_COSTS xtensa_rtx_costs
246 #undef TARGET_ADDRESS_COST
247 #define TARGET_ADDRESS_COST hook_int_rtx_0
249 struct gcc_target targetm = TARGET_INITIALIZER;
253 * Functions to test Xtensa immediate operand validity.
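/* Illustrative examples (added for clarity, not exhaustive): these
   predicates encode the immediate ranges of particular Xtensa
   instruction formats.  For instance, xtensa_simm12b (2047) is nonzero
   but xtensa_simm12b (2048) is zero (the signed 12-bit range used for
   constant moves), and xtensa_b4const accepts only the values
   -1, 1..8, 10, 12, 16, 32, 64, 128 and 256 from the branch-immediate
   encoding table.  */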
257 xtensa_b4constu (v)
258 int v;
260 switch (v)
262 case 32768:
263 case 65536:
264 case 2:
265 case 3:
266 case 4:
267 case 5:
268 case 6:
269 case 7:
270 case 8:
271 case 10:
272 case 12:
273 case 16:
274 case 32:
275 case 64:
276 case 128:
277 case 256:
278 return 1;
280 return 0;
284 xtensa_simm8x256 (v)
285 int v;
287 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
291 xtensa_ai4const (v)
292 int v;
294 return (v == -1 || (v >= 1 && v <= 15));
298 xtensa_simm7 (v)
299 int v;
301 return v >= -32 && v <= 95;
305 xtensa_b4const (v)
306 int v;
308 switch (v)
310 case -1:
311 case 1:
312 case 2:
313 case 3:
314 case 4:
315 case 5:
316 case 6:
317 case 7:
318 case 8:
319 case 10:
320 case 12:
321 case 16:
322 case 32:
323 case 64:
324 case 128:
325 case 256:
326 return 1;
328 return 0;
332 xtensa_simm8 (v)
333 int v;
335 return v >= -128 && v <= 127;
339 xtensa_tp7 (v)
340 int v;
342 return (v >= 7 && v <= 22);
346 xtensa_lsi4x4 (v)
347 int v;
349 return (v & 3) == 0 && (v >= 0 && v <= 60);
353 xtensa_simm12b (v)
354 int v;
356 return v >= -2048 && v <= 2047;
360 xtensa_uimm8 (v)
361 int v;
363 return v >= 0 && v <= 255;
367 xtensa_uimm8x2 (v)
368 int v;
370 return (v & 1) == 0 && (v >= 0 && v <= 510);
374 xtensa_uimm8x4 (v)
375 int v;
377 return (v & 3) == 0 && (v >= 0 && v <= 1020);
381 /* This is just like the standard true_regnum() function except that it
382 works even when reg_renumber is not initialized. */
385 xt_true_regnum (x)
386 rtx x;
388 if (GET_CODE (x) == REG)
390 if (reg_renumber
391 && REGNO (x) >= FIRST_PSEUDO_REGISTER
392 && reg_renumber[REGNO (x)] >= 0)
393 return reg_renumber[REGNO (x)];
394 return REGNO (x);
396 if (GET_CODE (x) == SUBREG)
398 int base = xt_true_regnum (SUBREG_REG (x));
399 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
400 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
401 GET_MODE (SUBREG_REG (x)),
402 SUBREG_BYTE (x), GET_MODE (x));
404 return -1;
409 add_operand (op, mode)
410 rtx op;
411 enum machine_mode mode;
413 if (GET_CODE (op) == CONST_INT)
414 return (xtensa_simm8 (INTVAL (op)) ||
415 xtensa_simm8x256 (INTVAL (op)));
417 return register_operand (op, mode);
422 arith_operand (op, mode)
423 rtx op;
424 enum machine_mode mode;
426 if (GET_CODE (op) == CONST_INT)
427 return xtensa_simm8 (INTVAL (op));
429 return register_operand (op, mode);
434 nonimmed_operand (op, mode)
435 rtx op;
436 enum machine_mode mode;
438 /* We cannot use the standard nonimmediate_operand() predicate because
439 it includes constant pool memory operands. */
441 if (memory_operand (op, mode))
442 return !constantpool_address_p (XEXP (op, 0));
444 return register_operand (op, mode);
449 mem_operand (op, mode)
450 rtx op;
451 enum machine_mode mode;
453 /* We cannot use the standard memory_operand() predicate because
454 it includes constant pool memory operands. */
456 if (memory_operand (op, mode))
457 return !constantpool_address_p (XEXP (op, 0));
459 return FALSE;
464 xtensa_valid_move (mode, operands)
465 enum machine_mode mode;
466 rtx *operands;
468 /* Either the destination or source must be a register, and the
469 MAC16 accumulator doesn't count. */
471 if (register_operand (operands[0], mode))
473 int dst_regnum = xt_true_regnum (operands[0]);
475 /* The stack pointer can only be assigned with a MOVSP opcode. */
476 if (dst_regnum == STACK_POINTER_REGNUM)
477 return (mode == SImode
478 && register_operand (operands[1], mode)
479 && !ACC_REG_P (xt_true_regnum (operands[1])));
481 if (!ACC_REG_P (dst_regnum))
482 return true;
484 if (register_operand (operands[1], mode))
486 int src_regnum = xt_true_regnum (operands[1]);
487 if (!ACC_REG_P (src_regnum))
488 return true;
490 return FALSE;
495 mask_operand (op, mode)
496 rtx op;
497 enum machine_mode mode;
499 if (GET_CODE (op) == CONST_INT)
500 return xtensa_mask_immediate (INTVAL (op));
502 return register_operand (op, mode);
507 extui_fldsz_operand (op, mode)
508 rtx op;
509 enum machine_mode mode ATTRIBUTE_UNUSED;
511 return ((GET_CODE (op) == CONST_INT)
512 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
517 sext_operand (op, mode)
518 rtx op;
519 enum machine_mode mode;
521 if (TARGET_SEXT)
522 return nonimmed_operand (op, mode);
523 return mem_operand (op, mode);
528 sext_fldsz_operand (op, mode)
529 rtx op;
530 enum machine_mode mode ATTRIBUTE_UNUSED;
532 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
537 lsbitnum_operand (op, mode)
538 rtx op;
539 enum machine_mode mode ATTRIBUTE_UNUSED;
541 if (GET_CODE (op) == CONST_INT)
543 return (BITS_BIG_ENDIAN
544 ? (INTVAL (op) == BITS_PER_WORD-1)
545 : (INTVAL (op) == 0));
547 return FALSE;
551 static int
552 b4const_or_zero (v)
553 int v;
555 if (v == 0)
556 return TRUE;
557 return xtensa_b4const (v);
562 branch_operand (op, mode)
563 rtx op;
564 enum machine_mode mode;
566 if (GET_CODE (op) == CONST_INT)
567 return b4const_or_zero (INTVAL (op));
569 return register_operand (op, mode);
574 ubranch_operand (op, mode)
575 rtx op;
576 enum machine_mode mode;
578 if (GET_CODE (op) == CONST_INT)
579 return xtensa_b4constu (INTVAL (op));
581 return register_operand (op, mode);
586 call_insn_operand (op, mode)
587 rtx op;
588 enum machine_mode mode ATTRIBUTE_UNUSED;
590 if ((GET_CODE (op) == REG)
591 && (op != arg_pointer_rtx)
592 && ((REGNO (op) < FRAME_POINTER_REGNUM)
593 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
594 return TRUE;
596 if (CONSTANT_ADDRESS_P (op))
598 /* Direct calls only allowed to static functions with PIC. */
599 return (!flag_pic || (GET_CODE (op) == SYMBOL_REF
600 && SYMBOL_REF_FLAG (op)));
603 return FALSE;
608 move_operand (op, mode)
609 rtx op;
610 enum machine_mode mode;
612 if (register_operand (op, mode))
613 return TRUE;
615 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
616 result in 0/1. */
617 if (GET_CODE (op) == CONSTANT_P_RTX)
618 return TRUE;
620 if (GET_CODE (op) == CONST_INT)
621 return xtensa_simm12b (INTVAL (op));
623 if (GET_CODE (op) == MEM)
624 return memory_address_p (mode, XEXP (op, 0));
626 return FALSE;
631 smalloffset_mem_p (op)
632 rtx op;
634 if (GET_CODE (op) == MEM)
636 rtx addr = XEXP (op, 0);
637 if (GET_CODE (addr) == REG)
638 return REG_OK_FOR_BASE_P (addr);
639 if (GET_CODE (addr) == PLUS)
641 rtx offset = XEXP (addr, 0);
642 if (GET_CODE (offset) != CONST_INT)
643 offset = XEXP (addr, 1);
644 if (GET_CODE (offset) != CONST_INT)
645 return FALSE;
646 return xtensa_lsi4x4 (INTVAL (offset));
649 return FALSE;
654 smalloffset_double_mem_p (op)
655 rtx op;
657 if (!smalloffset_mem_p (op))
658 return FALSE;
659 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
664 constantpool_address_p (addr)
665 rtx addr;
667 rtx sym = addr;
669 if (GET_CODE (addr) == CONST)
671 rtx offset;
673 /* only handle (PLUS (SYM, OFFSET)) form */
674 addr = XEXP (addr, 0);
675 if (GET_CODE (addr) != PLUS)
676 return FALSE;
678 /* make sure the address is word aligned */
679 offset = XEXP (addr, 1);
680 if ((GET_CODE (offset) != CONST_INT)
681 || ((INTVAL (offset) & 3) != 0))
682 return FALSE;
684 sym = XEXP (addr, 0);
687 if ((GET_CODE (sym) == SYMBOL_REF)
688 && CONSTANT_POOL_ADDRESS_P (sym))
689 return TRUE;
690 return FALSE;
695 constantpool_mem_p (op)
696 rtx op;
698 if (GET_CODE (op) == MEM)
699 return constantpool_address_p (XEXP (op, 0));
700 return FALSE;
705 non_const_move_operand (op, mode)
706 rtx op;
707 enum machine_mode mode;
709 if (register_operand (op, mode))
710 return 1;
711 if (GET_CODE (op) == SUBREG)
712 op = SUBREG_REG (op);
713 if (GET_CODE (op) == MEM)
714 return memory_address_p (mode, XEXP (op, 0));
715 return FALSE;
719 /* Accept the floating point constant 1 in the appropriate mode. */
722 const_float_1_operand (op, mode)
723 rtx op;
724 enum machine_mode mode;
726 REAL_VALUE_TYPE d;
727 static REAL_VALUE_TYPE onedf;
728 static REAL_VALUE_TYPE onesf;
729 static int one_initialized;
731 if ((GET_CODE (op) != CONST_DOUBLE)
732 || (mode != GET_MODE (op))
733 || (mode != DFmode && mode != SFmode))
734 return FALSE;
736 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
738 if (! one_initialized)
740 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
741 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
742 one_initialized = TRUE;
745 if (mode == DFmode)
746 return REAL_VALUES_EQUAL (d, onedf);
747 else
748 return REAL_VALUES_EQUAL (d, onesf);
753 fpmem_offset_operand (op, mode)
754 rtx op;
755 enum machine_mode mode ATTRIBUTE_UNUSED;
757 if (GET_CODE (op) == CONST_INT)
758 return xtensa_mem_offset (INTVAL (op), SFmode);
759 return 0;
763 void
764 xtensa_extend_reg (dst, src)
765 rtx dst;
766 rtx src;
768 rtx temp = gen_reg_rtx (SImode);
769 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
771 /* generate paradoxical subregs as needed so that the modes match */
772 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
773 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
775 emit_insn (gen_ashlsi3 (temp, src, shift));
776 emit_insn (gen_ashrsi3 (dst, temp, shift));
780 void
781 xtensa_load_constant (dst, src)
782 rtx dst;
783 rtx src;
785 enum machine_mode mode = GET_MODE (dst);
786 src = force_const_mem (SImode, src);
788 /* PC-relative loads are always SImode so we have to add a SUBREG if that
789 is not the desired mode */
791 if (mode != SImode)
793 if (register_operand (dst, mode))
794 dst = simplify_gen_subreg (SImode, dst, mode, 0);
795 else
797 src = force_reg (SImode, src);
798 src = gen_lowpart_SUBREG (mode, src);
802 emit_move_insn (dst, src);
807 branch_operator (x, mode)
808 rtx x;
809 enum machine_mode mode;
811 if (GET_MODE (x) != mode)
812 return FALSE;
814 switch (GET_CODE (x))
816 case EQ:
817 case NE:
818 case LT:
819 case GE:
820 return TRUE;
821 default:
822 break;
824 return FALSE;
829 ubranch_operator (x, mode)
830 rtx x;
831 enum machine_mode mode;
833 if (GET_MODE (x) != mode)
834 return FALSE;
836 switch (GET_CODE (x))
838 case LTU:
839 case GEU:
840 return TRUE;
841 default:
842 break;
844 return FALSE;
849 boolean_operator (x, mode)
850 rtx x;
851 enum machine_mode mode;
853 if (GET_MODE (x) != mode)
854 return FALSE;
856 switch (GET_CODE (x))
858 case EQ:
859 case NE:
860 return TRUE;
861 default:
862 break;
864 return FALSE;
869 xtensa_mask_immediate (v)
870 int v;
872 #define MAX_MASK_SIZE 16
873 int mask_size;
875 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
877 if ((v & 1) == 0)
878 return FALSE;
879 v = v >> 1;
880 if (v == 0)
881 return TRUE;
884 return FALSE;
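/* Worked example: xtensa_mask_immediate accepts a contiguous block of
   low-order one bits, at most 16 of them.  So 0xff (8 ones) and 0xffff
   (16 ones) return TRUE, while 0x1fe (bit 0 clear) and 0x1ffff
   (17 ones) return FALSE.  */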
889 xtensa_mem_offset (v, mode)
890 unsigned v;
891 enum machine_mode mode;
893 switch (mode)
895 case BLKmode:
896 /* Handle the worst case for block moves. See xtensa_expand_block_move
897 where we emit an optimized block move operation if the block can be
898 moved in < "move_ratio" pieces. The worst case is when the block is
899 aligned but has a size of (3 mod 4) (does this happen?) so that the
900 last piece requires a byte load/store. */
901 return (xtensa_uimm8 (v) &&
902 xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
904 case QImode:
905 return xtensa_uimm8 (v);
907 case HImode:
908 return xtensa_uimm8x2 (v);
910 case DFmode:
911 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
913 default:
914 break;
917 return xtensa_uimm8x4 (v);
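/* Worked example for the BLKmode case above: with LARGEST_MOVE_RATIO of
   15 and assuming MOVE_MAX is 4 on this target, both V and V + 60 must
   be valid unsigned 8-bit offsets, so BLKmode accepts offsets 0..195.
   For HImode the offset must be even and at most 510; for the SImode
   default it must be a multiple of 4 and at most 1020.  */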
921 /* Make normal rtx_code into something we can index from an array */
923 static enum internal_test
924 map_test_to_internal_test (test_code)
925 enum rtx_code test_code;
927 enum internal_test test = ITEST_MAX;
929 switch (test_code)
931 default: break;
932 case EQ: test = ITEST_EQ; break;
933 case NE: test = ITEST_NE; break;
934 case GT: test = ITEST_GT; break;
935 case GE: test = ITEST_GE; break;
936 case LT: test = ITEST_LT; break;
937 case LE: test = ITEST_LE; break;
938 case GTU: test = ITEST_GTU; break;
939 case GEU: test = ITEST_GEU; break;
940 case LTU: test = ITEST_LTU; break;
941 case LEU: test = ITEST_LEU; break;
944 return test;
948 /* Generate the code to compare two integer values. The return value is
949 the comparison expression. */
951 static rtx
952 gen_int_relational (test_code, cmp0, cmp1, p_invert)
953 enum rtx_code test_code; /* relational test (EQ, etc) */
954 rtx cmp0; /* first operand to compare */
955 rtx cmp1; /* second operand to compare */
956 int *p_invert; /* whether branch needs to reverse its test */
958 struct cmp_info {
959 enum rtx_code test_code; /* test code to use in insn */
960 int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
961 int const_add; /* constant to add (convert LE -> LT) */
962 int reverse_regs; /* reverse registers in test */
963 int invert_const; /* != 0 if invert value if cmp1 is constant */
964 int invert_reg; /* != 0 if invert value if cmp1 is register */
965 int unsignedp; /* != 0 for unsigned comparisons. */
968 static struct cmp_info info[ (int)ITEST_MAX ] = {
970 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
971 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
973 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
974 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
975 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
976 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
978 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
979 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
980 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
981 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
984 enum internal_test test;
985 enum machine_mode mode;
986 struct cmp_info *p_info;
988 test = map_test_to_internal_test (test_code);
989 if (test == ITEST_MAX)
990 abort ();
992 p_info = &info[ (int)test ];
994 mode = GET_MODE (cmp0);
995 if (mode == VOIDmode)
996 mode = GET_MODE (cmp1);
998 /* Make sure we can handle any constants given to us. */
999 if (GET_CODE (cmp1) == CONST_INT)
1001 HOST_WIDE_INT value = INTVAL (cmp1);
1002 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
1004 /* if the immediate overflows or does not fit in the immediate field,
1005 spill it to a register */
1007 if ((p_info->unsignedp ?
1008 (uvalue + p_info->const_add > uvalue) :
1009 (value + p_info->const_add > value)) != (p_info->const_add > 0))
1011 cmp1 = force_reg (mode, cmp1);
1013 else if (!(p_info->const_range_p) (value + p_info->const_add))
1015 cmp1 = force_reg (mode, cmp1);
1018 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
1020 cmp1 = force_reg (mode, cmp1);
1023 /* See if we need to invert the result. */
1024 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
1025 ? p_info->invert_const
1026 : p_info->invert_reg);
1028 /* Comparison to constants, may involve adding 1 to change a LT into LE.
1029 Comparison between two registers, may involve switching operands. */
1030 if (GET_CODE (cmp1) == CONST_INT)
1032 if (p_info->const_add != 0)
1033 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
1036 else if (p_info->reverse_regs)
1038 rtx temp = cmp0;
1039 cmp0 = cmp1;
1040 cmp1 = temp;
1043 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
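/* Worked example (values chosen for illustration): for (GT x 5) the
   table above selects LT with const_add = 1 and invert_const = 1, so
   cmp1 becomes 6 (which is in the b4const table), the function returns
   (LT x 6), and *p_invert tells the caller to reverse the branch --
   i.e. branch when !(x < 6), which is exactly x > 5.  */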
1047 /* Generate the code to compare two float values. The return value is
1048 the comparison expression. */
1050 static rtx
1051 gen_float_relational (test_code, cmp0, cmp1)
1052 enum rtx_code test_code; /* relational test (EQ, etc) */
1053 rtx cmp0; /* first operand to compare */
1054 rtx cmp1; /* second operand to compare */
1056 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
1057 rtx brtmp;
1058 int reverse_regs, invert;
1060 switch (test_code)
1062 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1063 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1064 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1065 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1066 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1067 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1068 default:
1069 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1070 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
1073 if (reverse_regs)
1075 rtx temp = cmp0;
1076 cmp0 = cmp1;
1077 cmp1 = temp;
1080 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1081 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1083 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
1087 void
1088 xtensa_expand_conditional_branch (operands, test_code)
1089 rtx *operands;
1090 enum rtx_code test_code;
1092 enum cmp_type type = branch_type;
1093 rtx cmp0 = branch_cmp[0];
1094 rtx cmp1 = branch_cmp[1];
1095 rtx cmp;
1096 int invert;
1097 rtx label1, label2;
1099 switch (type)
1101 case CMP_DF:
1102 default:
1103 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1105 case CMP_SI:
1106 invert = FALSE;
1107 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1108 break;
1110 case CMP_SF:
1111 if (!TARGET_HARD_FLOAT)
1112 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1113 invert = FALSE;
1114 cmp = gen_float_relational (test_code, cmp0, cmp1);
1115 break;
1118 /* Generate the branch. */
1120 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1121 label2 = pc_rtx;
1123 if (invert)
1125 label2 = label1;
1126 label1 = pc_rtx;
1129 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1130 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
1131 label1,
1132 label2)));
1136 static rtx
1137 gen_conditional_move (cmp)
1138 rtx cmp;
1140 enum rtx_code code = GET_CODE (cmp);
1141 rtx op0 = branch_cmp[0];
1142 rtx op1 = branch_cmp[1];
1144 if (branch_type == CMP_SI)
1146 /* Jump optimization calls get_condition() which canonicalizes
1147 comparisons like (GE x <const>) to (GT x <const-1>).
1148 Transform those comparisons back to GE, since that is the
1149 comparison supported in Xtensa. We shouldn't have to
1150 transform <LE x const> comparisons, because neither
1151 xtensa_expand_conditional_branch() nor get_condition() will
1152 produce them. */
1154 if ((code == GT) && (op1 == constm1_rtx))
1156 code = GE;
1157 op1 = const0_rtx;
1159 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1161 if (boolean_operator (cmp, VOIDmode))
1163 /* swap the operands to make const0 second */
1164 if (op0 == const0_rtx)
1166 op0 = op1;
1167 op1 = const0_rtx;
1170 /* if not comparing against zero, emit a comparison (subtract) */
1171 if (op1 != const0_rtx)
1173 op0 = expand_binop (SImode, sub_optab, op0, op1,
1174 0, 0, OPTAB_LIB_WIDEN);
1175 op1 = const0_rtx;
1178 else if (branch_operator (cmp, VOIDmode))
1180 /* swap the operands to make const0 second */
1181 if (op0 == const0_rtx)
1183 op0 = op1;
1184 op1 = const0_rtx;
1186 switch (code)
1188 case LT: code = GE; break;
1189 case GE: code = LT; break;
1190 default: abort ();
1194 if (op1 != const0_rtx)
1195 return 0;
1197 else
1198 return 0;
1200 return gen_rtx (code, VOIDmode, op0, op1);
1203 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1204 return gen_float_relational (code, op0, op1);
1206 return 0;
1211 xtensa_expand_conditional_move (operands, isflt)
1212 rtx *operands;
1213 int isflt;
1215 rtx cmp;
1216 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1218 if (!(cmp = gen_conditional_move (operands[1])))
1219 return 0;
1221 if (isflt)
1222 gen_fn = (branch_type == CMP_SI
1223 ? gen_movsfcc_internal0
1224 : gen_movsfcc_internal1);
1225 else
1226 gen_fn = (branch_type == CMP_SI
1227 ? gen_movsicc_internal0
1228 : gen_movsicc_internal1);
1230 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1231 operands[2], operands[3], cmp));
1232 return 1;
1237 xtensa_expand_scc (operands)
1238 rtx *operands;
1240 rtx dest = operands[0];
1241 rtx cmp = operands[1];
1242 rtx one_tmp, zero_tmp;
1243 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1245 if (!(cmp = gen_conditional_move (cmp)))
1246 return 0;
1248 one_tmp = gen_reg_rtx (SImode);
1249 zero_tmp = gen_reg_rtx (SImode);
1250 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1251 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1253 gen_fn = (branch_type == CMP_SI
1254 ? gen_movsicc_internal0
1255 : gen_movsicc_internal1);
1256 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1257 return 1;
1261 /* Emit insns to move operands[1] into operands[0].
1263 Return 1 if we have written out everything that needs to be done to
1264 do the move. Otherwise, return 0 and the caller will emit the move
1265 normally. */
1268 xtensa_emit_move_sequence (operands, mode)
1269 rtx *operands;
1270 enum machine_mode mode;
1272 if (CONSTANT_P (operands[1])
1273 && GET_CODE (operands[1]) != CONSTANT_P_RTX
1274 && (GET_CODE (operands[1]) != CONST_INT
1275 || !xtensa_simm12b (INTVAL (operands[1]))))
1277 xtensa_load_constant (operands[0], operands[1]);
1278 return 1;
1281 if (!(reload_in_progress | reload_completed))
1283 if (!xtensa_valid_move (mode, operands))
1284 operands[1] = force_reg (mode, operands[1]);
1286 if (xtensa_copy_incoming_a7 (operands, mode))
1287 return 1;
1290 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1291 instruction won't be recognized after reload. So we remove the
1292 subreg and adjust mem accordingly. */
1293 if (reload_in_progress)
1295 operands[0] = fixup_subreg_mem (operands[0]);
1296 operands[1] = fixup_subreg_mem (operands[1]);
1298 return 0;
1301 static rtx
1302 fixup_subreg_mem (x)
1303 rtx x;
1305 if (GET_CODE (x) == SUBREG
1306 && GET_CODE (SUBREG_REG (x)) == REG
1307 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1309 rtx temp =
1310 gen_rtx_SUBREG (GET_MODE (x),
1311 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1312 SUBREG_BYTE (x));
1313 x = alter_subreg (&temp);
1315 return x;
1319 /* Check if this move is copying an incoming argument in a7. If so,
1320 emit the move, followed by the special "set_frame_ptr"
1321 unspec_volatile insn, at the very beginning of the function. This
1322 is necessary because the register allocator will ignore conflicts
1323 with a7 and may assign some other pseudo to a7. If that pseudo was
1324 assigned prior to this move, it would clobber the incoming argument
1325 in a7. By copying the argument out of a7 as the very first thing,
1326 and then immediately following that with an unspec_volatile to keep
1327 the scheduler away, we should avoid any problems. */
1329 bool
1330 xtensa_copy_incoming_a7 (operands, mode)
1331 rtx *operands;
1332 enum machine_mode mode;
1334 if (a7_overlap_mentioned_p (operands[1])
1335 && !cfun->machine->incoming_a7_copied)
1337 rtx mov;
1338 switch (mode)
1340 case DFmode:
1341 mov = gen_movdf_internal (operands[0], operands[1]);
1342 break;
1343 case SFmode:
1344 mov = gen_movsf_internal (operands[0], operands[1]);
1345 break;
1346 case DImode:
1347 mov = gen_movdi_internal (operands[0], operands[1]);
1348 break;
1349 case SImode:
1350 mov = gen_movsi_internal (operands[0], operands[1]);
1351 break;
1352 case HImode:
1353 mov = gen_movhi_internal (operands[0], operands[1]);
1354 break;
1355 case QImode:
1356 mov = gen_movqi_internal (operands[0], operands[1]);
1357 break;
1358 default:
1359 abort ();
1362 /* Insert the instructions before any other argument copies.
1363 (The set_frame_ptr insn comes _after_ the move, so push it
1364 out first.) */
1365 push_topmost_sequence ();
1366 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1367 emit_insn_after (mov, get_insns ());
1368 pop_topmost_sequence ();
1370 /* Ideally the incoming argument in a7 would only be copied
1371 once, since propagating a7 into the body of a function
1372 will almost certainly lead to errors. However, there is
1373 at least one harmless case (in GCSE) where the original
1374 copy from a7 is changed to copy into a new pseudo. Thus,
1375 we use a flag to only do this special treatment for the
1376 first copy of a7. */
1378 cfun->machine->incoming_a7_copied = true;
1380 return 1;
1383 return 0;
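/* Rough illustration of the sequence emitted above at the very start of
   the function (register numbers are only an example):

	(set (reg:SI <new pseudo>) (reg:SI 7 a7))	; copy argument out of a7
	set_frame_ptr					; unspec_volatile barrier

   so the incoming a7 argument is saved before any other code can
   clobber a7, and the unspec_volatile keeps later insns from being
   scheduled above the copy.  */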
1387 /* Try to expand a block move operation to an RTL block move instruction.
1388 If not optimizing or if the block size is not a constant or if the
1389 block is small, the expansion fails and GCC falls back to calling
1390 memcpy().
1392 operands[0] is the destination
1393 operands[1] is the source
1394 operands[2] is the length
1395 operands[3] is the alignment */
1398 xtensa_expand_block_move (operands)
1399 rtx *operands;
1401 rtx dest = operands[0];
1402 rtx src = operands[1];
1403 int bytes = INTVAL (operands[2]);
1404 int align = XINT (operands[3], 0);
1405 int num_pieces, move_ratio;
1407 /* If this is not a fixed size move, just call memcpy */
1408 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1409 return 0;
1411 /* Anything to move? */
1412 if (bytes <= 0)
1413 return 1;
1415 if (align > MOVE_MAX)
1416 align = MOVE_MAX;
1418 /* decide whether to expand inline based on the optimization level */
1419 move_ratio = 4;
1420 if (optimize > 2)
1421 move_ratio = LARGEST_MOVE_RATIO;
1422 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1423 if (num_pieces >= move_ratio)
1424 return 0;
1426 /* make sure the memory addresses are valid */
1427 operands[0] = validize_mem (dest);
1428 operands[1] = validize_mem (src);
1430 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1431 operands[2], operands[3]));
1432 return 1;
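/* Worked example: for a 32-byte copy with 4-byte alignment, num_pieces
   is 8.  At -O2 move_ratio is 4, so 8 >= 4 and the expansion is
   rejected (the caller falls back to memcpy); at -O3 move_ratio is 15,
   so the movstrsi_internal pattern is emitted instead.  */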
1436 /* Emit a sequence of instructions to implement a block move, trying
1437 to hide load delay slots as much as possible. Load N values into
1438 temporary registers, store those N values, and repeat until the
1439 complete block has been moved. N=delay_slots+1 */
1441 struct meminsnbuf {
1442 char template[30];
1443 rtx operands[2];
1446 void
1447 xtensa_emit_block_move (operands, tmpregs, delay_slots)
1448 rtx *operands;
1449 rtx *tmpregs;
1450 int delay_slots;
1452 rtx dest = operands[0];
1453 rtx src = operands[1];
1454 int bytes = INTVAL (operands[2]);
1455 int align = XINT (operands[3], 0);
1456 rtx from_addr = XEXP (src, 0);
1457 rtx to_addr = XEXP (dest, 0);
1458 int from_struct = MEM_IN_STRUCT_P (src);
1459 int to_struct = MEM_IN_STRUCT_P (dest);
1460 int offset = 0;
1461 int chunk_size, item_size;
1462 struct meminsnbuf *ldinsns, *stinsns;
1463 const char *ldname, *stname;
1464 enum machine_mode mode;
1466 if (align > MOVE_MAX)
1467 align = MOVE_MAX;
1468 item_size = align;
1469 chunk_size = delay_slots + 1;
1471 ldinsns = (struct meminsnbuf *)
1472 alloca (chunk_size * sizeof (struct meminsnbuf));
1473 stinsns = (struct meminsnbuf *)
1474 alloca (chunk_size * sizeof (struct meminsnbuf));
1476 mode = xtensa_find_mode_for_size (item_size);
1477 item_size = GET_MODE_SIZE (mode);
1478 ldname = xtensa_ld_opcodes[(int) mode];
1479 stname = xtensa_st_opcodes[(int) mode];
1481 while (bytes > 0)
1483 int n;
1485 for (n = 0; n < chunk_size; n++)
1487 rtx addr, mem;
1489 if (bytes == 0)
1491 chunk_size = n;
1492 break;
1495 if (bytes < item_size)
1497 /* find a smaller item_size which we can load & store */
1498 item_size = bytes;
1499 mode = xtensa_find_mode_for_size (item_size);
1500 item_size = GET_MODE_SIZE (mode);
1501 ldname = xtensa_ld_opcodes[(int) mode];
1502 stname = xtensa_st_opcodes[(int) mode];
1505 /* record the load instruction opcode and operands */
1506 addr = plus_constant (from_addr, offset);
1507 mem = gen_rtx_MEM (mode, addr);
1508 if (! memory_address_p (mode, addr))
1509 abort ();
1510 MEM_IN_STRUCT_P (mem) = from_struct;
1511 ldinsns[n].operands[0] = tmpregs[n];
1512 ldinsns[n].operands[1] = mem;
1513 sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1515 /* record the store instruction opcode and operands */
1516 addr = plus_constant (to_addr, offset);
1517 mem = gen_rtx_MEM (mode, addr);
1518 if (! memory_address_p (mode, addr))
1519 abort ();
1520 MEM_IN_STRUCT_P (mem) = to_struct;
1521 stinsns[n].operands[0] = tmpregs[n];
1522 stinsns[n].operands[1] = mem;
1523 sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1525 offset += item_size;
1526 bytes -= item_size;
1529 /* now output the loads followed by the stores */
1530 for (n = 0; n < chunk_size; n++)
1531 output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1532 for (n = 0; n < chunk_size; n++)
1533 output_asm_insn (stinsns[n].template, stinsns[n].operands);
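/* Example output for an 8-byte, word-aligned copy with one load delay
   slot (chunk_size == 2); the registers shown are illustrative and
   depend on the temporaries and operands actually passed in:

	l32i	a9, a3, 0
	l32i	a10, a3, 4
	s32i	a9, a2, 0
	s32i	a10, a2, 4

   i.e. the loads of each chunk are grouped ahead of the stores so a
   load's delay slot is covered by the following load.  */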
1538 static enum machine_mode
1539 xtensa_find_mode_for_size (item_size)
1540 unsigned item_size;
1542 enum machine_mode mode, tmode;
1544 while (1)
1546 mode = VOIDmode;
1548 /* find mode closest to but not bigger than item_size */
1549 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1550 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1551 if (GET_MODE_SIZE (tmode) <= item_size)
1552 mode = tmode;
1553 if (mode == VOIDmode)
1554 abort ();
1556 item_size = GET_MODE_SIZE (mode);
1558 if (xtensa_ld_opcodes[(int) mode]
1559 && xtensa_st_opcodes[(int) mode])
1560 break;
1562 /* cannot load & store this mode; try something smaller */
1563 item_size -= 1;
1566 return mode;
1570 void
1571 xtensa_expand_nonlocal_goto (operands)
1572 rtx *operands;
1574 rtx goto_handler = operands[1];
1575 rtx containing_fp = operands[3];
1577 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1578 is too big to generate in-line */
1580 if (GET_CODE (containing_fp) != REG)
1581 containing_fp = force_reg (Pmode, containing_fp);
1583 goto_handler = replace_rtx (copy_rtx (goto_handler),
1584 virtual_stack_vars_rtx,
1585 containing_fp);
1587 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1588 0, VOIDmode, 2,
1589 containing_fp, Pmode,
1590 goto_handler, Pmode);
1594 static struct machine_function *
1595 xtensa_init_machine_status ()
1597 return ggc_alloc_cleared (sizeof (struct machine_function));
1601 void
1602 xtensa_setup_frame_addresses ()
1604 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1605 cfun->machine->accesses_prev_frame = 1;
1607 emit_library_call
1608 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1609 0, VOIDmode, 0);
1613 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1614 a comment showing where the end of the loop is. However, if there is a
1615 label or a branch at the end of the loop then we need to place a nop
1616 there. If the loop ends with a label we need the nop so that branches
1617 targeting that label will target the nop (and thus remain in the loop),
1618 instead of targeting the instruction after the loop (and thus exiting
1619 the loop). If the loop ends with a branch, we need the nop in case the
1620 branch is targeting a location inside the loop. When the branch
1621 executes it will cause the loop count to be decremented even if it is
1622 taken (because it is the last instruction in the loop), so we need to
1623 nop after the branch to prevent the loop count from being decremented
1624 when the branch is taken. */
1626 void
1627 xtensa_emit_loop_end (insn, operands)
1628 rtx insn;
1629 rtx *operands;
1631 char done = 0;
1633 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1635 switch (GET_CODE (insn))
1637 case NOTE:
1638 case BARRIER:
1639 break;
1641 case CODE_LABEL:
1642 output_asm_insn ("nop.n", operands);
1643 done = 1;
1644 break;
1646 default:
1648 rtx body = PATTERN (insn);
1650 if (GET_CODE (body) == JUMP_INSN)
1652 output_asm_insn ("nop.n", operands);
1653 done = 1;
1655 else if ((GET_CODE (body) != USE)
1656 && (GET_CODE (body) != CLOBBER))
1657 done = 1;
1659 break;
1663 output_asm_insn ("# loop end for %0", operands);
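/* Example (register name illustrative): per the comment above, a
   zero-cost loop that ends with a label or a branch gets

	nop.n
	# loop end for a3

   while a loop ending in an ordinary instruction gets only the
   "# loop end" comment line.  */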
1667 char *
1668 xtensa_emit_call (callop, operands)
1669 int callop;
1670 rtx *operands;
1672 static char result[64];
1673 rtx tgt = operands[callop];
1675 if (GET_CODE (tgt) == CONST_INT)
1676 sprintf (result, "call8\t0x%lx", INTVAL (tgt));
1677 else if (register_operand (tgt, VOIDmode))
1678 sprintf (result, "callx8\t%%%d", callop);
1679 else
1680 sprintf (result, "call8\t%%%d", callop);
1682 return result;
1686 /* Return the stabs register number to use for 'regno'. */
1689 xtensa_dbx_register_number (regno)
1690 int regno;
1692 int first = -1;
1694 if (GP_REG_P (regno)) {
1695 regno -= GP_REG_FIRST;
1696 first = 0;
1698 else if (BR_REG_P (regno)) {
1699 regno -= BR_REG_FIRST;
1700 first = 16;
1702 else if (FP_REG_P (regno)) {
1703 regno -= FP_REG_FIRST;
1704 /* The current numbering convention is that TIE registers are
1705 numbered in libcc order beginning with 256. We can't guarantee
1706 that the FP registers will come first, so the following is just
1707 a guess. It seems like we should make a special case for FP
1708 registers and give them fixed numbers < 256. */
1709 first = 256;
1711 else if (ACC_REG_P (regno))
1713 first = 0;
1714 regno = -1;
1717 /* When optimizing, we sometimes get asked about pseudo-registers
1718 that don't represent hard registers. Return 0 for these. */
1719 if (first == -1)
1720 return 0;
1722 return first + regno;
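/* Example mappings produced by the scheme above: a3 -> 3, b2 -> 18
   (16 + 2), and f5 -> 261 (256 + 5); a pseudo that never received a
   hard register reports 0.  */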
1726 /* Argument support functions. */
1728 /* Initialize CUMULATIVE_ARGS for a function. */
1730 void
1731 init_cumulative_args (cum, fntype, libname)
1732 CUMULATIVE_ARGS *cum; /* argument info to initialize */
1733 tree fntype ATTRIBUTE_UNUSED; /* tree ptr for function decl */
1734 rtx libname ATTRIBUTE_UNUSED; /* SYMBOL_REF of library name or 0 */
1736 cum->arg_words = 0;
1739 /* Advance the argument to the next argument position. */
1741 void
1742 function_arg_advance (cum, mode, type)
1743 CUMULATIVE_ARGS *cum; /* current arg information */
1744 enum machine_mode mode; /* current arg mode */
1745 tree type; /* type of the argument or 0 if lib support */
1747 int words, max;
1748 int *arg_words;
1750 arg_words = &cum->arg_words;
1751 max = MAX_ARGS_IN_REGISTERS;
1753 words = (((mode != BLKmode)
1754 ? (int) GET_MODE_SIZE (mode)
1755 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1757 if ((*arg_words + words > max) && (*arg_words < max))
1758 *arg_words = max;
1760 *arg_words += words;
1764 /* Return an RTL expression containing the register for the given mode,
1765 or 0 if the argument is to be passed on the stack. */
1768 function_arg (cum, mode, type, incoming_p)
1769 CUMULATIVE_ARGS *cum; /* current arg information */
1770 enum machine_mode mode; /* current arg mode */
1771 tree type; /* type of the argument or 0 if lib support */
1772 int incoming_p; /* computing the incoming registers? */
1774 int regbase, words, max;
1775 int *arg_words;
1776 int regno;
1777 enum machine_mode result_mode;
1779 arg_words = &cum->arg_words;
1780 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1781 max = MAX_ARGS_IN_REGISTERS;
1783 words = (((mode != BLKmode)
1784 ? (int) GET_MODE_SIZE (mode)
1785 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1787 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1788 *arg_words += (*arg_words & 1);
1790 if (*arg_words + words > max)
1791 return (rtx)0;
1793 regno = regbase + *arg_words;
1794 result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);
1796 /* We need to make sure that references to a7 are represented with
1797 rtx that is not equal to hard_frame_pointer_rtx. For BLKmode and
1798 modes bigger than 2 words (because we only have patterns for
1799 modes of 2 words or smaller), we can't control the expansion
1800 unless we explicitly list the individual registers in a PARALLEL. */
1802 if ((mode == BLKmode || words > 2)
1803 && regno < A7_REG
1804 && regno + words > A7_REG)
1806 rtx result;
1807 int n;
1809 result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
1810 for (n = 0; n < words; n++)
1812 XVECEXP (result, 0, n) =
1813 gen_rtx_EXPR_LIST (VOIDmode,
1814 gen_raw_REG (SImode, regno + n),
1815 GEN_INT (n * UNITS_PER_WORD));
1817 return result;
1820 return gen_raw_REG (result_mode, regno);
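/* Worked example of the PARALLEL case (assuming incoming arguments
   start at a2): a 12-byte BLKmode argument whose first register would
   be a6 overlaps a7, so the result is roughly

	(parallel [(expr_list (reg:SI 6) (const_int 0))
		   (expr_list (reg:SI 7) (const_int 4))
		   (expr_list (reg:SI 8) (const_int 8))])

   built with gen_raw_REG so that the a7 reference is distinct from
   hard_frame_pointer_rtx.  */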
1824 void
1825 override_options ()
1827 int regno;
1828 enum machine_mode mode;
1830 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1831 error ("boolean registers required for the floating-point option");
1833 /* set up the tables of ld/st opcode names for block moves */
1834 xtensa_ld_opcodes[(int) SImode] = "l32i";
1835 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1836 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1837 xtensa_st_opcodes[(int) SImode] = "s32i";
1838 xtensa_st_opcodes[(int) HImode] = "s16i";
1839 xtensa_st_opcodes[(int) QImode] = "s8i";
1841 xtensa_char_to_class['q'] = SP_REG;
1842 xtensa_char_to_class['a'] = GR_REGS;
1843 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1844 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1845 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1846 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1847 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1848 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1849 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1851 /* Set up array giving whether a given register can hold a given mode. */
1852 for (mode = VOIDmode;
1853 mode != MAX_MACHINE_MODE;
1854 mode = (enum machine_mode) ((int) mode + 1))
1856 int size = GET_MODE_SIZE (mode);
1857 enum mode_class class = GET_MODE_CLASS (mode);
1859 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1861 int temp;
1863 if (ACC_REG_P (regno))
1864 temp = (TARGET_MAC16 &&
1865 (class == MODE_INT) && (size <= UNITS_PER_WORD));
1866 else if (GP_REG_P (regno))
1867 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1868 else if (FP_REG_P (regno))
1869 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1870 else if (BR_REG_P (regno))
1871 temp = (TARGET_BOOLEANS && (mode == CCmode));
1872 else
1873 temp = FALSE;
1875 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1879 init_machine_status = xtensa_init_machine_status;
1881 /* Check PIC settings. There's no need for -fPIC on Xtensa and
1882 some targets need to always use PIC. */
1883 if (flag_pic > 1 || (XTENSA_ALWAYS_PIC))
1884 flag_pic = 1;
1888 /* A C compound statement to output to stdio stream STREAM the
1889 assembler syntax for an instruction operand X. X is an RTL
1890 expression.
1892 CODE is a value that can be used to specify one of several ways
1893 of printing the operand. It is used when identical operands
1894 must be printed differently depending on the context. CODE
1895 comes from the '%' specification that was used to request
1896 printing of the operand. If the specification was just '%DIGIT'
1897 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1898 is the ASCII code for LTR.
1900 If X is a register, this macro should print the register's name.
1901 The names can be found in an array 'reg_names' whose type is
1902 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1904 When the machine description has a specification '%PUNCT' (a '%'
1905 followed by a punctuation character), this macro is called with
1906 a null pointer for X and the punctuation character for CODE.
1908 'a', 'c', 'l', and 'n' are reserved.
1910 The Xtensa specific codes are:
1912 'd' CONST_INT, print as signed decimal
1913 'x' CONST_INT, print as signed hexadecimal
1914 'K' CONST_INT, print number of bits in mask for EXTUI
1915 'R' CONST_INT, print (X & 0x1f)
1916 'L' CONST_INT, print ((32 - X) & 0x1f)
1917 'D' REG, print second register of double-word register operand
1918 'N' MEM, print address of next word following a memory operand
1919 'v' MEM, if memory reference is volatile, output a MEMW before it
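/* Examples of the codes above applied to (const_int 12): '%d' prints
   "12", '%x' prints "0xc", '%R' prints "12", and '%L' prints "20"
   ((32 - 12) & 0x1f).  '%K' applied to (const_int 255) prints "8",
   the width of the 8-bit mask.  */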
1922 static void
1923 printx (file, val)
1924 FILE *file;
1925 signed int val;
1927 /* print a hexadecimal value in a nice way */
1928 if ((val > -0xa) && (val < 0xa))
1929 fprintf (file, "%d", val);
1930 else if (val < 0)
1931 fprintf (file, "-0x%x", -val);
1932 else
1933 fprintf (file, "0x%x", val);
1937 void
1938 print_operand (file, op, letter)
1939 FILE *file; /* file to write to */
1940 rtx op; /* operand to print */
1941 int letter; /* %<letter> or 0 */
1943 enum rtx_code code;
1945 if (! op)
1946 error ("PRINT_OPERAND null pointer");
1948 code = GET_CODE (op);
1949 switch (code)
1951 case REG:
1952 case SUBREG:
1954 int regnum = xt_true_regnum (op);
1955 if (letter == 'D')
1956 regnum++;
1957 fprintf (file, "%s", reg_names[regnum]);
1958 break;
1961 case MEM:
1962 /* For a volatile memory reference, emit a MEMW before the
1963 load or store. */
1964 if (letter == 'v')
1966 if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
1967 fprintf (file, "memw\n\t");
1968 break;
1970 else if (letter == 'N')
1972 enum machine_mode mode;
1973 switch (GET_MODE (op))
1975 case DFmode: mode = SFmode; break;
1976 case DImode: mode = SImode; break;
1977 default: abort ();
1979 op = adjust_address (op, mode, 4);
1982 output_address (XEXP (op, 0));
1983 break;
1985 case CONST_INT:
1986 switch (letter)
1988 case 'K':
1990 int num_bits = 0;
1991 unsigned val = INTVAL (op);
1992 while (val & 1)
1994 num_bits += 1;
1995 val = val >> 1;
1997 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
1998 fatal_insn ("invalid mask", op);
2000 fprintf (file, "%d", num_bits);
2001 break;
2004 case 'L':
2005 fprintf (file, "%ld", (32 - INTVAL (op)) & 0x1f);
2006 break;
2008 case 'R':
2009 fprintf (file, "%ld", INTVAL (op) & 0x1f);
2010 break;
2012 case 'x':
2013 printx (file, INTVAL (op));
2014 break;
2016 case 'd':
2017 default:
2018 fprintf (file, "%ld", INTVAL (op));
2019 break;
2022 break;
2024 default:
2025 output_addr_const (file, op);
2030 /* A C compound statement to output to stdio stream STREAM the
2031 assembler syntax for an instruction operand that is a memory
2032 reference whose address is ADDR. ADDR is an RTL expression. */
2034 void
2035 print_operand_address (file, addr)
2036 FILE *file;
2037 rtx addr;
2039 if (!addr)
2040 error ("PRINT_OPERAND_ADDRESS, null pointer");
2042 switch (GET_CODE (addr))
2044 default:
2045 fatal_insn ("invalid address", addr);
2046 break;
2048 case REG:
2049 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2050 break;
2052 case PLUS:
2054 rtx reg = (rtx)0;
2055 rtx offset = (rtx)0;
2056 rtx arg0 = XEXP (addr, 0);
2057 rtx arg1 = XEXP (addr, 1);
2059 if (GET_CODE (arg0) == REG)
2061 reg = arg0;
2062 offset = arg1;
2064 else if (GET_CODE (arg1) == REG)
2066 reg = arg1;
2067 offset = arg0;
2069 else
2070 fatal_insn ("no register in address", addr);
2072 if (CONSTANT_P (offset))
2074 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2075 output_addr_const (file, offset);
2077 else
2078 fatal_insn ("address offset not a constant", addr);
2080 break;
2082 case LABEL_REF:
2083 case SYMBOL_REF:
2084 case CONST_INT:
2085 case CONST:
2086 output_addr_const (file, addr);
2087 break;
2092 /* Emit either a label, .comm, or .lcomm directive. */
2094 void
2095 xtensa_declare_object (file, name, init_string, final_string, size)
2096 FILE *file;
2097 char *name;
2098 char *init_string;
2099 char *final_string;
2100 int size;
2102 fputs (init_string, file); /* "", "\t.comm\t", or "\t.lcomm\t" */
2103 assemble_name (file, name);
2104 fprintf (file, final_string, size); /* ":\n", ",%u\n", ",%u\n" */
2108 void
2109 xtensa_output_literal (file, x, mode, labelno)
2110 FILE *file;
2111 rtx x;
2112 enum machine_mode mode;
2113 int labelno;
2115 long value_long[2];
2116 REAL_VALUE_TYPE r;
2117 int size;
2119 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2121 switch (GET_MODE_CLASS (mode))
2123 case MODE_FLOAT:
2124 if (GET_CODE (x) != CONST_DOUBLE)
2125 abort ();
2127 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2128 switch (mode)
2130 case SFmode:
2131 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2132 fprintf (file, "0x%08lx\n", value_long[0]);
2133 break;
2135 case DFmode:
2136 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2137 fprintf (file, "0x%08lx, 0x%08lx\n",
2138 value_long[0], value_long[1]);
2139 break;
2141 default:
2142 abort ();
2145 break;
2147 case MODE_INT:
2148 case MODE_PARTIAL_INT:
2149 size = GET_MODE_SIZE (mode);
2150 if (size == 4)
2152 output_addr_const (file, x);
2153 fputs ("\n", file);
2155 else if (size == 8)
2157 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2158 fputs (", ", file);
2159 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2160 fputs ("\n", file);
2162 else
2163 abort ();
2164 break;
2166 default:
2167 abort ();
2172 /* Return the bytes needed to compute the frame pointer from the current
2173 stack pointer. */
2175 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2176 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
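/* Worked example, assuming a 128-bit STACK_BOUNDARY: STACK_BYTES is 16,
   so XTENSA_STACK_ALIGN (100) yields 112.  compute_frame_size rounds
   the total of the local-variable size, the outgoing-argument area,
   and WINDOW_SIZE words of register-save space in the same way.  */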
2178 long
2179 compute_frame_size (size)
2180 int size; /* # of var. bytes allocated */
2182 /* add space for the incoming static chain value */
2183 if (current_function_needs_context)
2184 size += (1 * UNITS_PER_WORD);
2186 xtensa_current_frame_size =
2187 XTENSA_STACK_ALIGN (size
2188 + current_function_outgoing_args_size
2189 + (WINDOW_SIZE * UNITS_PER_WORD));
2190 return xtensa_current_frame_size;
2195 xtensa_frame_pointer_required ()
2197 /* The code to expand builtin_frame_addr and builtin_return_addr
2198 currently uses the hard_frame_pointer instead of frame_pointer.
2199 This seems wrong but maybe it's necessary for other architectures.
2200 This function is derived from the i386 code. */
2202 if (cfun->machine->accesses_prev_frame)
2203 return 1;
2205 return 0;
2209 void
2210 xtensa_reorg (first)
2211 rtx first;
2213 rtx insn, set_frame_ptr_insn = 0;
2215 unsigned long tsize = compute_frame_size (get_frame_size ());
2216 if (tsize < (1 << (12+3)))
2217 frame_size_const = 0;
2218 else
2220 frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));
2222 /* make sure the constant is used so it doesn't get eliminated
2223 from the constant pool */
2224 emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
2227 if (!frame_pointer_needed)
2228 return;
2230 /* Search all instructions, looking for the insn that sets up the
2231 frame pointer. This search will fail if the function does not
2232 have an incoming argument in $a7, but in that case, we can just
2233 set up the frame pointer at the very beginning of the
2234 function. */
2236 for (insn = first; insn; insn = NEXT_INSN (insn))
2238 rtx pat;
2240 if (!INSN_P (insn))
2241 continue;
2243 pat = PATTERN (insn);
2244 if (GET_CODE (pat) == SET
2245 && GET_CODE (SET_SRC (pat)) == UNSPEC_VOLATILE
2246 && (XINT (SET_SRC (pat), 1) == UNSPECV_SET_FP))
2248 set_frame_ptr_insn = insn;
2249 break;
2253 if (set_frame_ptr_insn)
2255 /* for all instructions prior to set_frame_ptr_insn, replace
2256 hard_frame_pointer references with stack_pointer */
2257 for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
2259 if (INSN_P (insn))
2260 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2261 hard_frame_pointer_rtx,
2262 stack_pointer_rtx);
2265 else
2267 /* emit the frame pointer move immediately after the NOTE that starts
2268 the function */
2269 emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
2270 stack_pointer_rtx), first);
2275 /* Set up the stack and frame (if desired) for the function. */
2277 void
2278 xtensa_function_prologue (file, size)
2279 FILE *file;
2280 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
2282 unsigned long tsize = compute_frame_size (get_frame_size ());
2284 if (frame_pointer_needed)
2285 fprintf (file, "\t.frame\ta7, %ld\n", tsize);
2286 else
2287 fprintf (file, "\t.frame\tsp, %ld\n", tsize);
2290 if (tsize < (1 << (12+3)))
2292 fprintf (file, "\tentry\tsp, %ld\n", tsize);
2294 else
2296 fprintf (file, "\tentry\tsp, 16\n");
2298 /* use a8 as a temporary since a0-a7 may be live */
2299 fprintf (file, "\tl32r\ta8, ");
2300 print_operand (file, frame_size_const, 0);
2301 fprintf (file, "\n\tsub\ta8, sp, a8\n");
2302 fprintf (file, "\tmovsp\tsp, a8\n");
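/* Example prologues produced by the code above (frame sizes are
   illustrative).  For a 64-byte frame with no frame pointer:

	.frame	sp, 64
	entry	sp, 64

   For a 40000-byte frame, which does not fit in the entry immediate,
   the literal set up by xtensa_reorg (holding tsize - 16) is used:

	entry	sp, 16
	l32r	a8, <frame-size literal>
	sub	a8, sp, a8
	movsp	sp, a8
 */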
2307 /* Do any necessary cleanup after a function to restore
2308 stack, frame, and regs. */
2310 void
2311 xtensa_function_epilogue (file, size)
2312 FILE *file;
2313 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
2315 rtx insn = get_last_insn ();
2316 /* If the last insn was a BARRIER, we don't have to write anything. */
2317 if (GET_CODE (insn) == NOTE)
2318 insn = prev_nonnote_insn (insn);
2319 if (insn == 0 || GET_CODE (insn) != BARRIER)
2320 fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");
2322 xtensa_current_frame_size = 0;
2326 rtx
2327 xtensa_return_addr (count, frame)
2328 int count;
2329 rtx frame;
2331 rtx result, retaddr;
2333 if (count == -1)
2334 retaddr = gen_rtx_REG (Pmode, 0);
2335 else
2337 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2338 addr = memory_address (Pmode, addr);
2339 retaddr = gen_reg_rtx (Pmode);
2340 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2343 /* The 2 most-significant bits of the return address on Xtensa hold
2344 the register window size. To get the real return address, these
2345 bits must be replaced with the high bits from the current PC. */
2347 result = gen_reg_rtx (Pmode);
2348 emit_insn (gen_fix_return_addr (result, retaddr));
2349 return result;
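/* Rough equivalent of what the generated code computes (an editorial
   sketch; the real sequence comes from the fix_return_addr pattern in
   xtensa.md):

       result = (retaddr & 0x3fffffff) | (current_pc & 0xc0000000);

   i.e., the two window-size bits are masked off and replaced with the top
   two bits of the current PC, as described in the comment above.  */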
2353 /* Create the va_list data type.
2354 This structure is set up by __builtin_saveregs. The __va_reg
2355 field points to a stack-allocated region holding the contents of the
2356 incoming argument registers. The __va_ndx field is an index initialized
2357 to the position of the first unnamed (variable) argument. This same index
2358 is also used to address the arguments passed in memory. Thus, the
2359 __va_stk field is initialized to point to the position of the first
2360 argument in memory, offset to account for the arguments passed in
2361 registers. E.g., if there are 6 argument registers, and each register is
2362 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2363 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2364 argument word N for N >= 6. */
2366 tree
2367 xtensa_build_va_list ()
2369 tree f_stk, f_reg, f_ndx, record, type_decl;
2371 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2372 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2374 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2375 ptr_type_node);
2376 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2377 ptr_type_node);
2378 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2379 integer_type_node);
2381 DECL_FIELD_CONTEXT (f_stk) = record;
2382 DECL_FIELD_CONTEXT (f_reg) = record;
2383 DECL_FIELD_CONTEXT (f_ndx) = record;
2385 TREE_CHAIN (record) = type_decl;
2386 TYPE_NAME (record) = type_decl;
2387 TYPE_FIELDS (record) = f_stk;
2388 TREE_CHAIN (f_stk) = f_reg;
2389 TREE_CHAIN (f_reg) = f_ndx;
2391 layout_type (record);
2392 return record;
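/* The record built above corresponds roughly to this C declaration (an
   editorial sketch; the compiler of course builds the type nodes directly
   rather than parsing source):

       struct __va_list_tag
       {
         void *__va_stk;     -- arguments passed in memory
         void *__va_reg;     -- spilled argument registers (see saveregs)
         int __va_ndx;       -- byte index of the next argument
       };  */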
2396 /* Save the incoming argument registers on the stack. Returns the
2397 address of the saved registers. */
2399 rtx
2400 xtensa_builtin_saveregs ()
2402 rtx gp_regs, dest;
2403 int arg_words = current_function_arg_words;
2404 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2405 int i;
2407 if (gp_left == 0)
2408 return const0_rtx;
2410 /* Allocate the general-purpose register space.  */
2411 gp_regs = assign_stack_local
2412 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2413 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2415 /* Now store the incoming registers. */
2416 dest = change_address (gp_regs, SImode,
2417 plus_constant (XEXP (gp_regs, 0),
2418 arg_words * UNITS_PER_WORD));
2420 /* Note: Don't use move_block_from_reg() here because the incoming
2421 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2422 Instead, call gen_raw_REG() directly so that we get a distinct
2423 instance of (REG:SI 7). */
2424 for (i = 0; i < gp_left; i++)
2426 emit_move_insn (operand_subword (dest, i, 1, BLKmode),
2427 gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
2430 return XEXP (gp_regs, 0);
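/* Worked example for the loop above (editorial; assumes the standard
   windowed ABI where MAX_ARGS_IN_REGISTERS is 6 and GP_ARG_FIRST is a2):
   for "int f (int a, int b, ...)", arg_words is 2, so gp_left is 4 and the
   loop stores a4, a5, a6 and a7 into the save area at byte offsets 8, 12,
   16 and 20.  The a7 store is exactly the case the gen_raw_REG comment is
   about, since a7 doubles as the hard frame pointer.  */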
2434 /* Implement `va_start' for varargs and stdarg. We look at the
2435 current function to fill in an initial va_list. */
2437 void
2438 xtensa_va_start (valist, nextarg)
2439 tree valist;
2440 rtx nextarg ATTRIBUTE_UNUSED;
2442 tree f_stk, stk;
2443 tree f_reg, reg;
2444 tree f_ndx, ndx;
2445 tree t, u;
2446 int arg_words;
2448 arg_words = current_function_args_info.arg_words;
2450 f_stk = TYPE_FIELDS (va_list_type_node);
2451 f_reg = TREE_CHAIN (f_stk);
2452 f_ndx = TREE_CHAIN (f_reg);
2454 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2455 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2456 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2458 /* Call __builtin_saveregs; save the result in __va_reg.  */
2459 current_function_arg_words = arg_words;
2460 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2461 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2462 TREE_SIDE_EFFECTS (t) = 1;
2463 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2465 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area).  */
2466 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2467 u = fold (build (PLUS_EXPR, ptr_type_node, u,
2468 build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
2469 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2470 TREE_SIDE_EFFECTS (t) = 1;
2471 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2473 /* Set the __va_ndx member. */
2474 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2475 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2476 TREE_SIDE_EFFECTS (t) = 1;
2477 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
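/* Worked example for the assignments above (editorial; assumes 6 argument
   registers of 4 bytes each): for "int f (int a, int b, ...)", arg_words
   is 2, so after va_start

       __va_reg = address returned by __builtin_saveregs
       __va_stk = incoming argument pointer - 24
       __va_ndx = 8

   and the first va_arg fetch starts looking at byte index 8, i.e., right
   after the two named argument words.  */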
2481 /* Implement `va_arg'. */
2483 rtx
2484 xtensa_va_arg (valist, type)
2485 tree valist, type;
2487 tree f_stk, stk;
2488 tree f_reg, reg;
2489 tree f_ndx, ndx;
2490 tree tmp, addr_tree, type_size;
2491 rtx array, orig_ndx, r, addr, size, va_size;
2492 rtx lab_false, lab_over, lab_false2;
2494 f_stk = TYPE_FIELDS (va_list_type_node);
2495 f_reg = TREE_CHAIN (f_stk);
2496 f_ndx = TREE_CHAIN (f_reg);
2498 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2499 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2500 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2502 type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
2504 va_size = gen_reg_rtx (SImode);
2505 tmp = fold (build (MULT_EXPR, sizetype,
2506 fold (build (TRUNC_DIV_EXPR, sizetype,
2507 fold (build (PLUS_EXPR, sizetype,
2508 type_size,
2509 size_int (UNITS_PER_WORD - 1))),
2510 size_int (UNITS_PER_WORD))),
2511 size_int (UNITS_PER_WORD)));
2512 r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
2513 if (r != va_size)
2514 emit_move_insn (va_size, r);
2517 /* First align __va_ndx to a double word boundary if necessary for this arg:
2519 if (__alignof__ (TYPE) > 4)
2520 (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
2523 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2525 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2526 build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
2527 tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2528 build_int_2 (-2 * UNITS_PER_WORD, -1));
2529 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2530 TREE_SIDE_EFFECTS (tmp) = 1;
2531 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2535 /* Increment __va_ndx to point past the argument:
2537 orig_ndx = (AP).__va_ndx;
2538 (AP).__va_ndx += __va_size (TYPE);
2541 orig_ndx = gen_reg_rtx (SImode);
2542 r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2543 if (r != orig_ndx)
2544 emit_move_insn (orig_ndx, r);
2546 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2547 make_tree (intSI_type_node, va_size));
2548 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2549 TREE_SIDE_EFFECTS (tmp) = 1;
2550 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2553 /* Check if the argument is in registers:
2555 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2556 && !MUST_PASS_IN_STACK (type))
2557 __array = (AP).__va_reg;
2560 array = gen_reg_rtx (Pmode);
2562 lab_over = NULL_RTX;
2563 if (!MUST_PASS_IN_STACK (VOIDmode, type))
2565 lab_false = gen_label_rtx ();
2566 lab_over = gen_label_rtx ();
2568 emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
2569 EXPAND_NORMAL),
2570 GEN_INT (MAX_ARGS_IN_REGISTERS
2571 * UNITS_PER_WORD),
2572 GT, const1_rtx, SImode, 0, lab_false);
2574 r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2575 if (r != array)
2576 emit_move_insn (array, r);
2578 emit_jump_insn (gen_jump (lab_over));
2579 emit_barrier ();
2580 emit_label (lab_false);
2583 /* ...otherwise, the argument is on the stack (never split between
2584 registers and the stack -- change __va_ndx if necessary):
2586 else
2588 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2589 (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2590 __array = (AP).__va_stk;
2594 lab_false2 = gen_label_rtx ();
2595 emit_cmp_and_jump_insns (orig_ndx,
2596 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2597 GE, const1_rtx, SImode, 0, lab_false2);
2599 tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
2600 build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
2601 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2602 TREE_SIDE_EFFECTS (tmp) = 1;
2603 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2605 emit_label (lab_false2);
2607 r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2608 if (r != array)
2609 emit_move_insn (array, r);
2611 if (lab_over != NULL_RTX)
2612 emit_label (lab_over);
2615 /* Given the base array pointer (__array) and index to the subsequent
2616 argument (__va_ndx), find the address:
2618 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2619 ? sizeof (TYPE)
2620 : __va_size (TYPE))
2622 The results are endian-dependent because values smaller than one word
2623 are aligned differently.
2626 size = gen_reg_rtx (SImode);
2627 emit_move_insn (size, va_size);
2629 if (BYTES_BIG_ENDIAN)
2631 rtx lab_use_va_size = gen_label_rtx ();
2633 emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
2634 EXPAND_NORMAL),
2635 GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
2636 GE, const1_rtx, SImode, 0, lab_use_va_size);
2638 r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
2639 if (r != size)
2640 emit_move_insn (size, r);
2642 emit_label (lab_use_va_size);
2645 addr_tree = build (PLUS_EXPR, ptr_type_node,
2646 make_tree (ptr_type_node, array),
2647 ndx);
2648 addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
2649 make_tree (intSI_type_node, size));
2650 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2651 addr = copy_to_reg (addr);
2652 return addr;
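/* End-to-end example of the sequence generated above (editorial; same
   assumptions as the va_start example): fetching an "int" when __va_ndx is
   8 gives __va_size (int) == 4, advances __va_ndx to 12, takes the register
   branch because 12 <= 24, and returns __va_reg + 12 - 4 = __va_reg + 8,
   which is where a4, the first anonymous argument, was saved.  On a
   big-endian target with a type smaller than a word, sizeof (TYPE) is used
   for the final subtraction instead, as the comment above explains.  */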
2656 enum reg_class
2657 xtensa_preferred_reload_class (x, class, isoutput)
2658 rtx x;
2659 enum reg_class class;
2660 int isoutput;
2662 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2663 return NO_REGS;
2665 /* Don't use the stack pointer or hard frame pointer for reloads!
2666 The hard frame pointer would normally be OK except that it may
2667 briefly hold an incoming argument in the prologue, and reload
2668 won't know that it is live because the hard frame pointer is
2669 treated specially. */
2671 if (class == AR_REGS || class == GR_REGS)
2672 return RL_REGS;
2674 return class;
2678 enum reg_class
2679 xtensa_secondary_reload_class (class, mode, x, isoutput)
2680 enum reg_class class;
2681 enum machine_mode mode ATTRIBUTE_UNUSED;
2682 rtx x;
2683 int isoutput;
2685 int regno;
2687 if (GET_CODE (x) == SIGN_EXTEND)
2688 x = XEXP (x, 0);
2689 regno = xt_true_regnum (x);
2691 if (!isoutput)
2693 if (class == FP_REGS && constantpool_mem_p (x))
2694 return RL_REGS;
2697 if (ACC_REG_P (regno))
2698 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
2699 if (class == ACC_REG)
2700 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
2702 return NO_REGS;
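/* Example of the ACC_REG rules above (editorial sketch): moving a value
   between the MAC16 accumulator and anything other than an AR register
   (say FP_REGS or memory) cannot be done directly, so reload is told to go
   through an RL_REGS scratch; only accumulator <-> AR moves need no
   secondary register.  */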
2706 void
2707 order_regs_for_local_alloc ()
2709 if (!leaf_function_p ())
2711 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2712 FIRST_PSEUDO_REGISTER * sizeof (int));
2714 else
2716 int i, num_arg_regs;
2717 int nxt = 0;
2719 /* Use the AR registers in increasing order (skipping a0 and a1),
2720 but save the incoming argument registers as a last resort.  */
2721 num_arg_regs = current_function_args_info.arg_words;
2722 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2723 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2724 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2725 reg_alloc_order[nxt++] = i + num_arg_regs;
2726 for (i = 0; i < num_arg_regs; i++)
2727 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2729 /* List the coprocessor registers in order.  */
2730 for (i = 0; i < BR_REG_NUM; i++)
2731 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2733 /* List the FP registers in order for now.  */
2734 for (i = 0; i < 16; i++)
2735 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2737 /* GCC requires that we list *all* the registers.... */
2738 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2739 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2740 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2741 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2743 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
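/* Resulting order for a leaf function with two incoming argument words
   (editorial example; assumes GP_ARG_FIRST is a2): a4 ... a15 first, then
   the argument registers a2 and a3 as a last resort, then the branch and
   floating-point registers, and finally the registers that should
   essentially never be allocated (a0, a1, the two pseudo frame/arg
   pointers, and the MAC16 accumulator).  */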
2748 /* A customized version of reg_overlap_mentioned_p that only looks for
2749 references to a7 (as opposed to hard_frame_pointer_rtx). */
2751 int
2752 a7_overlap_mentioned_p (x)
2753 rtx x;
2755 int i, j;
2756 unsigned int x_regno;
2757 const char *fmt;
2759 if (GET_CODE (x) == REG)
2761 x_regno = REGNO (x);
2762 return (x != hard_frame_pointer_rtx
2763 && x_regno < A7_REG + 1
2764 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2767 if (GET_CODE (x) == SUBREG
2768 && GET_CODE (SUBREG_REG (x)) == REG
2769 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2771 x_regno = subreg_regno (x);
2772 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2773 && x_regno < A7_REG + 1
2774 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2777 /* X does not match, so try its subexpressions. */
2778 fmt = GET_RTX_FORMAT (GET_CODE (x));
2779 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2781 if (fmt[i] == 'e')
2783 if (a7_overlap_mentioned_p (XEXP (x, i)))
2784 return 1;
2786 else if (fmt[i] == 'E')
2788 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2789 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2790 return 1;
2794 return 0;
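/* Editorial example: the HARD_REGNO_NREGS test above also catches
   multi-word registers that merely overlap a7, e.g. (reg:DI 6) occupies a6
   and a7 and is reported, while hard_frame_pointer_rtx itself (the
   canonical a7 rtx) is deliberately excluded.  */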
2798 /* Some Xtensa targets support multiple bss sections. If the section
2799 name ends with ".bss", add SECTION_BSS to the flags. */
2801 static unsigned int
2802 xtensa_multibss_section_type_flags (decl, name, reloc)
2803 tree decl;
2804 const char *name;
2805 int reloc;
2807 unsigned int flags = default_section_type_flags (decl, name, reloc);
2808 const char *suffix;
2810 suffix = strrchr (name, '.');
2811 if (suffix && strcmp (suffix, ".bss") == 0)
2813 if (!decl || (TREE_CODE (decl) == VAR_DECL
2814 && DECL_INITIAL (decl) == NULL_TREE))
2815 flags |= SECTION_BSS; /* @nobits */
2816 else
2817 warning ("only uninitialized variables can be placed in a "
2818 ".bss section");
2821 return flags;
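/* Example of the convention handled above (editorial; the section name
   ".sram.bss" is made up for illustration):

       int scratch __attribute__ ((section (".sram.bss")));

   gets SECTION_BSS (emitted as @nobits) because the section name ends in
   ".bss", whereas giving the same section attribute to an initialized
   variable triggers the warning instead.  */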
2825 /* The literal pool stays with the function. */
2827 static void
2828 xtensa_select_rtx_section (mode, x, align)
2829 enum machine_mode mode ATTRIBUTE_UNUSED;
2830 rtx x ATTRIBUTE_UNUSED;
2831 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
2833 function_section (current_function_decl);
2836 /* If we are referencing a function that is static, make the SYMBOL_REF
2837 special so that we can generate direct calls to it even with -fpic. */
2839 static void
2840 xtensa_encode_section_info (decl, first)
2841 tree decl;
2842 int first ATTRIBUTE_UNUSED;
2844 if (TREE_CODE (decl) == FUNCTION_DECL && ! TREE_PUBLIC (decl))
2845 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
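/* Editorial illustration: with -fpic, a call to a function declared

       static void helper (void);

   can still be emitted as a direct call, because the SYMBOL_REF_FLAG set
   here marks the symbol as local to the translation unit; non-static
   functions are left unmarked and may need the slower PIC call path (the
   actual call patterns are determined elsewhere).  */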
2848 /* Compute a (partial) cost for rtx X. Return true if the complete
2849 cost has been computed, and false if subexpressions should be
2850 scanned. In either case, *TOTAL contains the cost result. */
2852 static bool
2853 xtensa_rtx_costs (x, code, outer_code, total)
2854 rtx x;
2855 int code, outer_code;
2856 int *total;
2858 switch (code)
2860 case CONST_INT:
2861 switch (outer_code)
2863 case SET:
2864 if (xtensa_simm12b (INTVAL (x)))
2866 *total = 4;
2867 return true;
2869 break;
2870 case PLUS:
2871 if (xtensa_simm8 (INTVAL (x))
2872 || xtensa_simm8x256 (INTVAL (x)))
2874 *total = 0;
2875 return true;
2877 break;
2878 case AND:
2879 if (xtensa_mask_immediate (INTVAL (x)))
2881 *total = 0;
2882 return true;
2884 break;
2885 case COMPARE:
2886 if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
2888 *total = 0;
2889 return true;
2891 break;
2892 case ASHIFT:
2893 case ASHIFTRT:
2894 case LSHIFTRT:
2895 case ROTATE:
2896 case ROTATERT:
2897 /* No way to tell if X is the 2nd operand, so be conservative.  */
2898 default: break;
2900 if (xtensa_simm12b (INTVAL (x)))
2901 *total = 5;
2902 else
2903 *total = 6;
2904 return true;
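/* Editorial example of the CONST_INT costs above:
   (plus (reg) (const_int 100)) fits the signed 8-bit addi range, so the
   constant is free; a constant that fits none of the operand contexts
   (e.g. 0x12345) falls back to the cost just above, 5 if it fits a 12-bit
   movi immediate and otherwise 6 (roughly, a literal-pool load).  */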
2906 case CONST:
2907 case LABEL_REF:
2908 case SYMBOL_REF:
2909 *total = 5;
2910 return true;
2912 case CONST_DOUBLE:
2913 *total = 7;
2914 return true;
2916 case MEM:
2918 int num_words =
2919 (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ? 2 : 1;
2921 if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
2922 *total = COSTS_N_INSNS (num_words);
2923 else
2924 *total = COSTS_N_INSNS (2 * num_words);
2925 return true;
2928 case FFS:
2929 *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
2930 return true;
2932 case NOT:
2933 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
2934 return true;
2936 case AND:
2937 case IOR:
2938 case XOR:
2939 if (GET_MODE (x) == DImode)
2940 *total = COSTS_N_INSNS (2);
2941 else
2942 *total = COSTS_N_INSNS (1);
2943 return true;
2945 case ASHIFT:
2946 case ASHIFTRT:
2947 case LSHIFTRT:
2948 if (GET_MODE (x) == DImode)
2949 *total = COSTS_N_INSNS (50);
2950 else
2951 *total = COSTS_N_INSNS (1);
2952 return true;
2954 case ABS:
2956 enum machine_mode xmode = GET_MODE (x);
2957 if (xmode == SFmode)
2958 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2959 else if (xmode == DFmode)
2960 *total = COSTS_N_INSNS (50);
2961 else
2962 *total = COSTS_N_INSNS (4);
2963 return true;
2966 case PLUS:
2967 case MINUS:
2969 enum machine_mode xmode = GET_MODE (x);
2970 if (xmode == SFmode)
2971 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2972 else if (xmode == DFmode || xmode == DImode)
2973 *total = COSTS_N_INSNS (50);
2974 else
2975 *total = COSTS_N_INSNS (1);
2976 return true;
2979 case NEG:
2980 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
2981 return true;
2983 case MULT:
2985 enum machine_mode xmode = GET_MODE (x);
2986 if (xmode == SFmode)
2987 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
2988 else if (xmode == DFmode || xmode == DImode)
2989 *total = COSTS_N_INSNS (50);
2990 else if (TARGET_MUL32)
2991 *total = COSTS_N_INSNS (4);
2992 else if (TARGET_MAC16)
2993 *total = COSTS_N_INSNS (16);
2994 else if (TARGET_MUL16)
2995 *total = COSTS_N_INSNS (12);
2996 else
2997 *total = COSTS_N_INSNS (50);
2998 return true;
3001 case DIV:
3002 case MOD:
3004 enum machine_mode xmode = GET_MODE (x);
3005 if (xmode == SFmode)
3007 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
3008 return true;
3010 else if (xmode == DFmode)
3012 *total = COSTS_N_INSNS (50);
3013 return true;
3016 /* fall through */
3018 case UDIV:
3019 case UMOD:
3021 enum machine_mode xmode = GET_MODE (x);
3022 if (xmode == DImode)
3023 *total = COSTS_N_INSNS (50);
3024 else if (TARGET_DIV32)
3025 *total = COSTS_N_INSNS (32);
3026 else
3027 *total = COSTS_N_INSNS (50);
3028 return true;
3031 case SQRT:
3032 if (GET_MODE (x) == SFmode)
3033 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
3034 else
3035 *total = COSTS_N_INSNS (50);
3036 return true;
3038 case SMIN:
3039 case UMIN:
3040 case SMAX:
3041 case UMAX:
3042 *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
3043 return true;
3045 case SIGN_EXTRACT:
3046 case SIGN_EXTEND:
3047 *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
3048 return true;
3050 case ZERO_EXTRACT:
3051 case ZERO_EXTEND:
3052 *total = COSTS_N_INSNS (1);
3053 return true;
3055 default:
3056 return false;
3060 #include "gt-xtensa.h"