1 /* Subroutines used for code generation on the Argonaut ARC cpu.
2 Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* ??? This is an old port, and is undoubtedly suffering from bit rot. */
26 #include "coretypes.h"
31 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
36 #include "insn-attr.h"
44 #include "target-def.h"
/* NOTE(review): this chunk is a lossy extraction of GCC's ARC back end
   (config/arc/arc.c).  The integer at the start of each line is a remnant
   of the original file's line numbering, and several original lines are
   missing.  Code below is left byte-for-byte as found; only comments were
   added.  */
46 /* Which cpu we're compiling for (NULL(=base), ???). */
47 const char *arc_cpu_string
;
/* Mangle string appended to symbols so code compiled for different CPU
   variants can be kept apart; set to NULL for the base CPU (see the
   option-override code below, which assigns it).  */
50 /* Name of mangle string to add to symbols to separate code compiled for each
52 const char *arc_mangle_cpu
;
54 /* Save the operands last given to a compare for use when we
55 generate a scc or bcc insn. */
56 rtx arc_compare_op0
, arc_compare_op1
;
58 /* Name of text, data, and rodata sections, as specified on command line.
59 Selected by -m{text,data,rodata} flags. */
60 const char *arc_text_string
= ARC_DEFAULT_TEXT_SECTION
;
61 const char *arc_data_string
= ARC_DEFAULT_DATA_SECTION
;
62 const char *arc_rodata_string
= ARC_DEFAULT_RODATA_SECTION
;
64 /* Name of text, data, and rodata sections used in varasm.c. */
/* These three are filled in at option-override time by formatting the
   *_string values above through ARC_SECTION_FORMAT (see the
   xmalloc/sprintf calls below).  */
65 const char *arc_text_section
;
66 const char *arc_data_section
;
67 const char *arc_rodata_section
;
69 /* Array of valid operand punctuation characters. */
/* Indexed by character.  The nonzero entries ('#', '*', '?', '!', '~',
   set below) are the punctuation accepted by PRINT_OPERAND_PUNCT_VALID_P.  */
70 char arc_punct_chars
[256];
72 /* Variables used by arc_final_prescan_insn to implement conditional
/* State of the conditional-execution finite state machine driven from
   final_prescan_insn; current_cc is the condition being applied, and the
   target insn/label identify the branch being conditionalized.  */
74 static int arc_ccfsm_state
;
75 static int arc_ccfsm_current_cc
;
76 static rtx arc_ccfsm_target_insn
;
77 static int arc_ccfsm_target_label
;
79 /* The maximum number of insns skipped which will be conditionalised if
81 #define MAX_INSNS_SKIPPED 3
83 /* A nop is needed between a 4 byte insn that sets the condition codes and
84 a branch that uses them (the same isn't true for an 8 byte insn that sets
85 the condition codes). Set by arc_final_prescan_insn. Used by
87 static int last_insn_set_cc_p
;
88 static int current_insn_set_cc_p
;
/* Forward declarations for the static helpers and target hooks defined
   later in this file.  */
89 static void record_cc_ref (rtx
);
90 static void arc_init_reg_tables (void);
91 static int get_arc_condition_code (rtx
);
92 const struct attribute_spec arc_attribute_table
[];
93 static tree
arc_handle_interrupt_attribute (tree
*, tree
, tree
, int, bool *);
94 static bool arc_assemble_integer (rtx
, unsigned int, int);
95 static void arc_output_function_prologue (FILE *, HOST_WIDE_INT
);
96 static void arc_output_function_epilogue (FILE *, HOST_WIDE_INT
);
97 static void arc_file_start (void);
98 static void arc_internal_label (FILE *, const char *, unsigned long);
99 static bool arc_rtx_costs (rtx
, int, int, int *);
100 static int arc_address_cost (rtx
);
102 /* Initialize the GCC target structure. */
/* Each #undef/#define pair overrides one default target hook before
   TARGET_INITIALIZER expands into the targetm definition below; the
   remaining hooks keep their defaults from target-def.h.  */
103 #undef TARGET_ASM_ALIGNED_HI_OP
104 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
105 #undef TARGET_ASM_ALIGNED_SI_OP
106 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
107 #undef TARGET_ASM_INTEGER
108 #define TARGET_ASM_INTEGER arc_assemble_integer
110 #undef TARGET_ASM_FUNCTION_PROLOGUE
111 #define TARGET_ASM_FUNCTION_PROLOGUE arc_output_function_prologue
112 #undef TARGET_ASM_FUNCTION_EPILOGUE
113 #define TARGET_ASM_FUNCTION_EPILOGUE arc_output_function_epilogue
114 #undef TARGET_ASM_FILE_START
115 #define TARGET_ASM_FILE_START arc_file_start
116 #undef TARGET_ATTRIBUTE_TABLE
117 #define TARGET_ATTRIBUTE_TABLE arc_attribute_table
118 #undef TARGET_ASM_INTERNAL_LABEL
119 #define TARGET_ASM_INTERNAL_LABEL arc_internal_label
121 #undef TARGET_RTX_COSTS
122 #define TARGET_RTX_COSTS arc_rtx_costs
123 #undef TARGET_ADDRESS_COST
124 #define TARGET_ADDRESS_COST arc_address_cost
/* The one and only definition of the target hook vector for this back end.  */
126 struct gcc_target targetm
= TARGET_INITIALIZER
;
128 /* Called by OVERRIDE_OPTIONS to initialize various things. */
135 if (arc_cpu_string
== 0
136 || !strcmp (arc_cpu_string
, "base"))
138 /* Ensure we have a printable value for the .cpu pseudo-op. */
139 arc_cpu_string
= "base";
141 arc_mangle_cpu
= NULL
;
143 else if (ARC_EXTENSION_CPU (arc_cpu_string
))
144 ; /* nothing to do */
147 error ("bad value (%s) for -mcpu switch", arc_cpu_string
);
148 arc_cpu_string
= "base";
150 arc_mangle_cpu
= NULL
;
153 /* Set the pseudo-ops for the various standard sections. */
154 arc_text_section
= tmp
= xmalloc (strlen (arc_text_string
) + sizeof (ARC_SECTION_FORMAT
) + 1);
155 sprintf (tmp
, ARC_SECTION_FORMAT
, arc_text_string
);
156 arc_data_section
= tmp
= xmalloc (strlen (arc_data_string
) + sizeof (ARC_SECTION_FORMAT
) + 1);
157 sprintf (tmp
, ARC_SECTION_FORMAT
, arc_data_string
);
158 arc_rodata_section
= tmp
= xmalloc (strlen (arc_rodata_string
) + sizeof (ARC_SECTION_FORMAT
) + 1);
159 sprintf (tmp
, ARC_SECTION_FORMAT
, arc_rodata_string
);
161 arc_init_reg_tables ();
163 /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P. */
164 memset (arc_punct_chars
, 0, sizeof (arc_punct_chars
));
165 arc_punct_chars
['#'] = 1;
166 arc_punct_chars
['*'] = 1;
167 arc_punct_chars
['?'] = 1;
168 arc_punct_chars
['!'] = 1;
169 arc_punct_chars
['~'] = 1;
172 /* The condition codes of the ARC, and the inverse function. */
173 static const char *const arc_condition_codes
[] =
175 "al", 0, "eq", "ne", "p", "n", "c", "nc", "v", "nv",
176 "gt", "le", "ge", "lt", "hi", "ls", "pnz", 0
179 #define ARC_INVERSE_CONDITION_CODE(X) ((X) ^ 1)
181 /* Returns the index of the ARC condition code string in
182 `arc_condition_codes'. COMPARISON should be an rtx like
183 `(eq (...) (...))'. */
186 get_arc_condition_code (rtx comparison
)
188 switch (GET_CODE (comparison
))
196 case GTU
: return 14;
197 case LEU
: return 15;
206 /* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
207 return the mode to be used for the comparison. */
210 arc_select_cc_mode (enum rtx_code op
,
211 rtx x ATTRIBUTE_UNUSED
,
212 rtx y ATTRIBUTE_UNUSED
)
220 switch (GET_CODE (x
))
239 /* Vectors to keep interesting information about registers where it can easily
240 be got. We use to use the actual mode value as the bit number, but there
241 is (or may be) more than 32 modes now. Instead we use two tables: one
242 indexed by hard register number, and one indexed by mode. */
244 /* The purpose of arc_mode_class is to shrink the range of modes so that
245 they all fit (as bit numbers) in a 32 bit word (again). Each real mode is
246 mapped into one arc_mode_class mode. */
248 enum arc_mode_class
{
250 S_MODE
, D_MODE
, T_MODE
, O_MODE
,
251 SF_MODE
, DF_MODE
, TF_MODE
, OF_MODE
254 /* Modes for condition codes. */
255 #define C_MODES (1 << (int) C_MODE)
257 /* Modes for single-word and smaller quantities. */
258 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
260 /* Modes for double-word and smaller quantities. */
261 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
263 /* Modes for quad-word and smaller quantities. */
264 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
266 /* Value is 1 if register/mode pair is acceptable on arc. */
268 const unsigned int arc_hard_regno_mode_ok
[] = {
269 T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
,
270 T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
,
271 T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
, D_MODES
,
272 D_MODES
, S_MODES
, S_MODES
, S_MODES
, S_MODES
, S_MODES
, S_MODES
, S_MODES
,
274 /* ??? Leave these as S_MODES for now. */
275 S_MODES
, S_MODES
, S_MODES
, S_MODES
, S_MODES
, S_MODES
, S_MODES
, S_MODES
,
276 S_MODES
, S_MODES
, S_MODES
, S_MODES
, S_MODES
, S_MODES
, S_MODES
, S_MODES
,
277 S_MODES
, S_MODES
, S_MODES
, S_MODES
, S_MODES
, S_MODES
, S_MODES
, S_MODES
,
278 S_MODES
, S_MODES
, S_MODES
, S_MODES
, S_MODES
, C_MODES
281 unsigned int arc_mode_class
[NUM_MACHINE_MODES
];
283 enum reg_class arc_regno_reg_class
[FIRST_PSEUDO_REGISTER
];
286 arc_init_reg_tables (void)
290 for (i
= 0; i
< NUM_MACHINE_MODES
; i
++)
292 switch (GET_MODE_CLASS (i
))
295 case MODE_PARTIAL_INT
:
296 case MODE_COMPLEX_INT
:
297 if (GET_MODE_SIZE (i
) <= 4)
298 arc_mode_class
[i
] = 1 << (int) S_MODE
;
299 else if (GET_MODE_SIZE (i
) == 8)
300 arc_mode_class
[i
] = 1 << (int) D_MODE
;
301 else if (GET_MODE_SIZE (i
) == 16)
302 arc_mode_class
[i
] = 1 << (int) T_MODE
;
303 else if (GET_MODE_SIZE (i
) == 32)
304 arc_mode_class
[i
] = 1 << (int) O_MODE
;
306 arc_mode_class
[i
] = 0;
309 case MODE_COMPLEX_FLOAT
:
310 if (GET_MODE_SIZE (i
) <= 4)
311 arc_mode_class
[i
] = 1 << (int) SF_MODE
;
312 else if (GET_MODE_SIZE (i
) == 8)
313 arc_mode_class
[i
] = 1 << (int) DF_MODE
;
314 else if (GET_MODE_SIZE (i
) == 16)
315 arc_mode_class
[i
] = 1 << (int) TF_MODE
;
316 else if (GET_MODE_SIZE (i
) == 32)
317 arc_mode_class
[i
] = 1 << (int) OF_MODE
;
319 arc_mode_class
[i
] = 0;
322 arc_mode_class
[i
] = 1 << (int) C_MODE
;
325 arc_mode_class
[i
] = 0;
330 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
333 arc_regno_reg_class
[i
] = GENERAL_REGS
;
335 arc_regno_reg_class
[i
] = LPCOUNT_REG
;
337 arc_regno_reg_class
[i
] = NO_REGS
/* CC_REG: must be NO_REGS */;
339 arc_regno_reg_class
[i
] = NO_REGS
;
343 /* ARC specific attribute support.
345 The ARC has these attributes:
346 interrupt - for interrupt functions
349 const struct attribute_spec arc_attribute_table
[] =
351 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
352 { "interrupt", 1, 1, true, false, false, arc_handle_interrupt_attribute
},
353 { NULL
, 0, 0, false, false, false, NULL
}
356 /* Handle an "interrupt" attribute; arguments as in
357 struct attribute_spec.handler. */
359 arc_handle_interrupt_attribute (tree
*node ATTRIBUTE_UNUSED
,
362 int flags ATTRIBUTE_UNUSED
,
365 tree value
= TREE_VALUE (args
);
367 if (TREE_CODE (value
) != STRING_CST
)
369 warning ("argument of `%s' attribute is not a string constant",
370 IDENTIFIER_POINTER (name
));
371 *no_add_attrs
= true;
373 else if (strcmp (TREE_STRING_POINTER (value
), "ilink1")
374 && strcmp (TREE_STRING_POINTER (value
), "ilink2"))
376 warning ("argument of `%s' attribute is not \"ilink1\" or \"ilink2\"",
377 IDENTIFIER_POINTER (name
));
378 *no_add_attrs
= true;
385 /* Acceptable arguments to the call insn. */
/* Predicate: OP may be used as a call address — a symbol, a legitimate
   CONST_INT, or a register.  (Function header/braces are missing from this
   extraction; return type is presumably int — TODO confirm against the
   original file.)  */
388 call_address_operand (rtx op
, enum machine_mode mode
)
390 return (symbolic_operand (op
, mode
)
391 || (GET_CODE (op
) == CONST_INT
&& LEGITIMATE_CONSTANT_P (op
))
392 || (GET_CODE (op
) == REG
));
/* Predicate: OP is a MEM whose address satisfies call_address_operand.  */
396 call_operand (rtx op
, enum machine_mode mode
)
398 if (GET_CODE (op
) != MEM
)
/* NOTE(review): the early-return and the extraction of MEM's address
   operand between lines 398 and 401 are missing from this extraction.  */
401 return call_address_operand (op
, mode
);
404 /* Returns 1 if OP is a symbol reference. */
/* NOTE(review): the switch arms (SYMBOL_REF/LABEL_REF/CONST cases and the
   default) are missing from this extraction.  */
407 symbolic_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
409 switch (GET_CODE (op
))
420 /* Return truth value of statement that OP is a symbolic memory
421 operand of mode MODE. */
424 symbolic_memory_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
/* Strip a SUBREG wrapper so the MEM test below sees the inner rtx.  */
426 if (GET_CODE (op
) == SUBREG
)
427 op
= SUBREG_REG (op
);
428 if (GET_CODE (op
) != MEM
)
/* NOTE(review): the reassignment of OP to the MEM's address between
   lines 428 and 431 is missing from this extraction.  */
431 return (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == CONST
432 || GET_CODE (op
) == LABEL_REF
);
435 /* Return true if OP is a short immediate (shimm) value. */
438 short_immediate_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
440 if (GET_CODE (op
) != CONST_INT
)
442 return SMALL_INT (INTVAL (op
));
445 /* Return true if OP will require a long immediate (limm) value.
446 This is currently only used when calculating length attributes. */
449 long_immediate_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
451 switch (GET_CODE (op
))
458 return !SMALL_INT (INTVAL (op
));
460 /* These can happen because large unsigned 32 bit constants are
461 represented this way (the multiplication patterns can cause these
462 to be generated). They also occur for SFmode values. */
470 /* Return true if OP is a MEM that when used as a load or store address will
471 require an 8 byte insn.
472 Load and store instructions don't allow the same possibilities but they're
473 similar enough that this one function will do.
474 This is currently only used when calculating length attributes. */
477 long_immediate_loadstore_operand (rtx op
,
478 enum machine_mode mode ATTRIBUTE_UNUSED
)
480 if (GET_CODE (op
) != MEM
)
484 switch (GET_CODE (op
))
491 /* This must be handled as "st c,[limm]". Ditto for load.
492 Technically, the assembler could translate some possibilities to
493 "st c,[limm/2 + limm/2]" if limm/2 will fit in a shimm, but we don't
494 assume that it does. */
497 /* These can happen because large unsigned 32 bit constants are
498 represented this way (the multiplication patterns can cause these
499 to be generated). They also occur for SFmode values. */
504 if (GET_CODE (XEXP (op
, 1)) == CONST_INT
505 && !SMALL_INT (INTVAL (XEXP (op
, 1))))
514 /* Return true if OP is an acceptable argument for a single word
518 move_src_operand (rtx op
, enum machine_mode mode
)
520 switch (GET_CODE (op
))
527 return (LARGE_INT (INTVAL (op
)));
529 /* We can handle DImode integer constants in SImode if the value
530 (signed or unsigned) will fit in 32 bits. This is needed because
531 large unsigned 32 bit constants are represented as CONST_DOUBLEs. */
533 return arc_double_limm_p (op
);
534 /* We can handle 32 bit floating point constants. */
536 return GET_MODE (op
) == SFmode
;
539 return register_operand (op
, mode
);
541 /* (subreg (mem ...) ...) can occur here if the inner part was once a
542 pseudo-reg and is now a stack slot. */
543 if (GET_CODE (SUBREG_REG (op
)) == MEM
)
544 return address_operand (XEXP (SUBREG_REG (op
), 0), mode
);
546 return register_operand (op
, mode
);
548 return address_operand (XEXP (op
, 0), mode
);
554 /* Return true if OP is an acceptable argument for a double word
558 move_double_src_operand (rtx op
, enum machine_mode mode
)
560 switch (GET_CODE (op
))
563 return register_operand (op
, mode
);
565 /* (subreg (mem ...) ...) can occur here if the inner part was once a
566 pseudo-reg and is now a stack slot. */
567 if (GET_CODE (SUBREG_REG (op
)) == MEM
)
568 return move_double_src_operand (SUBREG_REG (op
), mode
);
570 return register_operand (op
, mode
);
572 /* Disallow auto inc/dec for now. */
573 if (GET_CODE (XEXP (op
, 0)) == PRE_DEC
574 || GET_CODE (XEXP (op
, 0)) == PRE_INC
)
576 return address_operand (XEXP (op
, 0), mode
);
585 /* Return true if OP is an acceptable argument for a move destination. */
588 move_dest_operand (rtx op
, enum machine_mode mode
)
590 switch (GET_CODE (op
))
593 return register_operand (op
, mode
);
595 /* (subreg (mem ...) ...) can occur here if the inner part was once a
596 pseudo-reg and is now a stack slot. */
597 if (GET_CODE (SUBREG_REG (op
)) == MEM
)
598 return address_operand (XEXP (SUBREG_REG (op
), 0), mode
);
600 return register_operand (op
, mode
);
602 return address_operand (XEXP (op
, 0), mode
);
608 /* Return true if OP is valid load with update operand. */
611 load_update_operand (rtx op
, enum machine_mode mode
)
613 if (GET_CODE (op
) != MEM
614 || GET_MODE (op
) != mode
)
617 if (GET_CODE (op
) != PLUS
618 || GET_MODE (op
) != Pmode
619 || !register_operand (XEXP (op
, 0), Pmode
)
620 || !nonmemory_operand (XEXP (op
, 1), Pmode
))
625 /* Return true if OP is valid store with update operand. */
628 store_update_operand (rtx op
, enum machine_mode mode
)
630 if (GET_CODE (op
) != MEM
631 || GET_MODE (op
) != mode
)
634 if (GET_CODE (op
) != PLUS
635 || GET_MODE (op
) != Pmode
636 || !register_operand (XEXP (op
, 0), Pmode
)
637 || !(GET_CODE (XEXP (op
, 1)) == CONST_INT
638 && SMALL_INT (INTVAL (XEXP (op
, 1)))))
643 /* Return true if OP is a non-volatile non-immediate operand.
644 Volatile memory refs require a special "cache-bypass" instruction
645 and only the standard movXX patterns are set up to handle them. */
648 nonvol_nonimm_operand (rtx op
, enum machine_mode mode
)
650 if (GET_CODE (op
) == MEM
&& MEM_VOLATILE_P (op
))
652 return nonimmediate_operand (op
, mode
);
655 /* Accept integer operands in the range -0x80000000..0x7fffffff. We have
656 to check the range carefully since this predicate is used in DImode
/* The lower bound is spelled (-0x7fffffff - 1) because the literal
   -0x80000000 would be parsed as unary minus applied to an (unsigned)
   0x80000000 on 32-bit hosts.  */
660 const_sint32_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
662 /* All allowed constants will fit a CONST_INT. */
663 return (GET_CODE (op
) == CONST_INT
664 && (INTVAL (op
) >= (-0x7fffffff - 1) && INTVAL (op
) <= 0x7fffffff));
667 /* Accept integer operands in the range 0..0xffffffff. We have to check the
668 range carefully since this predicate is used in DImode contexts. Also, we
669 need some extra crud to make it work when hosted on 64-bit machines. */
672 const_uint32_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
/* On a 64-bit host every value 0..0xffffffff fits a CONST_INT and can be
   range-checked directly ...  */
674 #if HOST_BITS_PER_WIDE_INT > 32
675 /* All allowed constants will fit a CONST_INT. */
676 return (GET_CODE (op
) == CONST_INT
677 && (INTVAL (op
) >= 0 && INTVAL (op
) <= 0xffffffffL
));
/* ... while on a 32-bit host large unsigned values are represented as
   CONST_DOUBLEs with a zero high word (the #else for the branch above;
   the #else/#endif lines themselves are missing from this extraction).  */
679 return ((GET_CODE (op
) == CONST_INT
&& INTVAL (op
) >= 0)
680 || (GET_CODE (op
) == CONST_DOUBLE
&& CONST_DOUBLE_HIGH (op
) == 0));
684 /* Return 1 if OP is a comparison operator valid for the mode of CC.
685 This allows the use of MATCH_OPERATOR to recognize all the branch insns.
687 Some insns only set a few bits in the condition code. So only allow those
688 comparisons that use the bits that are valid. */
691 proper_comparison_operator (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
693 enum rtx_code code
= GET_CODE (op
);
/* Reject anything that is not a comparison rtx at all.  */
695 if (GET_RTX_CLASS (code
) != '<')
/* CCZNmode only provides the Z and N flags, so only equality tests are
   meaningful in it.  */
698 if (GET_MODE (XEXP (op
, 0)) == CCZNmode
)
699 return (code
== EQ
|| code
== NE
);
/* CCZNCmode additionally provides the carry flag, enabling the unsigned
   orderings as well.  */
700 if (GET_MODE (XEXP (op
, 0)) == CCZNCmode
)
701 return (code
== EQ
|| code
== NE
702 || code
== LTU
|| code
== GEU
|| code
== GTU
|| code
== LEU
);
706 /* Misc. utilities. */
708 /* X and Y are two things to compare using CODE. Emit the compare insn and
709 return the rtx for the cc reg in the proper mode. */
712 gen_compare_reg (enum rtx_code code
, rtx x
, rtx y
)
/* Pick the CC mode (e.g. CCZN/CCZNC — see arc_select_cc_mode) that
   matches this comparison.  */
714 enum machine_mode mode
= SELECT_CC_MODE (code
, x
, y
);
/* Hard register 61 is the condition-code register on this target
   (declaration of cc_reg is missing from this extraction).  */
717 cc_reg
= gen_rtx_REG (mode
, 61);
/* Emit (set cc_reg (compare x y)) and hand the flags register back to
   the caller.  */
719 emit_insn (gen_rtx_SET (VOIDmode
, cc_reg
,
720 gen_rtx_COMPARE (mode
, x
, y
)));
725 /* Return 1 if VALUE, a const_double, will fit in a limm (4 byte number).
726 We assume the value can be either signed or unsigned. */
729 arc_double_limm_p (rtx value
)
731 HOST_WIDE_INT low
, high
;
733 if (GET_CODE (value
) != CONST_DOUBLE
)
736 low
= CONST_DOUBLE_LOW (value
);
737 high
= CONST_DOUBLE_HIGH (value
);
/* If bit 31 of the low word is set the value can be viewed either as a
   32-bit unsigned number (high word zero) or as a sign-extended negative
   number (high word all ones — the tail of this expression is missing
   from this extraction).  */
739 if (low
& 0x80000000)
741 return (((unsigned HOST_WIDE_INT
) low
<= 0xffffffff && high
== 0)
742 || (((low
& - (unsigned HOST_WIDE_INT
) 0x80000000)
743 == - (unsigned HOST_WIDE_INT
) 0x80000000)
/* Otherwise the value fits iff it is a plain non-negative 31-bit number
   with no high word.  */
748 return (unsigned HOST_WIDE_INT
) low
<= 0x7fffffff && high
== 0;
752 /* Do any needed setup for a variadic function. For the ARC, we must
753 create a register parameter block, and then copy any anonymous arguments
754 in registers to memory.
756 CUM has not been updated for the last named argument which has type TYPE
757 and mode MODE, and we rely on this fact.
759 We do things a little weird here. We're supposed to only allocate space
760 for the anonymous arguments. However we need to keep the stack eight byte
761 aligned. So we round the space up if necessary, and leave it to va_start
765 arc_setup_incoming_varargs (CUMULATIVE_ARGS
*cum
,
766 enum machine_mode mode
,
767 tree type ATTRIBUTE_UNUSED
,
773 /* All BLKmode values are passed by reference. */
777 first_anon_arg
= *cum
+ ((GET_MODE_SIZE (mode
) + UNITS_PER_WORD
- 1)
780 if (first_anon_arg
< MAX_ARC_PARM_REGS
&& !no_rtl
)
782 /* Note that first_reg_offset < MAX_ARC_PARM_REGS. */
783 int first_reg_offset
= first_anon_arg
;
784 /* Size in words to "pretend" allocate. */
785 int size
= MAX_ARC_PARM_REGS
- first_reg_offset
;
786 /* Extra slop to keep stack eight byte aligned. */
787 int align_slop
= size
& 1;
790 regblock
= gen_rtx_MEM (BLKmode
,
791 plus_constant (arg_pointer_rtx
,
792 FIRST_PARM_OFFSET (0)
793 + align_slop
* UNITS_PER_WORD
));
794 set_mem_alias_set (regblock
, get_varargs_alias_set ());
795 set_mem_align (regblock
, BITS_PER_WORD
);
796 move_block_from_reg (first_reg_offset
, regblock
,
797 MAX_ARC_PARM_REGS
- first_reg_offset
);
799 *pretend_size
= ((MAX_ARC_PARM_REGS
- first_reg_offset
+ align_slop
)
804 /* Cost functions. */
806 /* Compute a (partial) cost for rtx X. Return true if the complete
807 cost has been computed, and false if subexpressions should be
808 scanned. In either case, *TOTAL contains the cost result. */
811 arc_rtx_costs (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
, int *total
)
815 /* Small integers are as cheap as registers. 4 byte values can
816 be fetched as immediate constants - let's give that the cost
819 if (SMALL_INT (INTVAL (x
)))
829 *total
= COSTS_N_INSNS (1);
835 split_double (x
, &high
, &low
);
836 *total
= COSTS_N_INSNS (!SMALL_INT (INTVAL (high
))
837 + !SMALL_INT (INTVAL (low
)));
841 /* Encourage synth_mult to find a synthetic multiply when reasonable.
842 If we need more than 12 insns to do a multiply, then go out-of-line,
843 since the call overhead will be < 10% of the cost of the multiply. */
848 *total
= COSTS_N_INSNS (1);
849 else if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
850 *total
= COSTS_N_INSNS (16);
852 *total
= COSTS_N_INSNS (INTVAL (XEXP ((x
), 1)));
861 /* Provide the costs of an addressing mode that contains ADDR.
862 If ADDR is not a valid address, its cost is irrelevant. */
865 arc_address_cost (rtx addr
)
867 switch (GET_CODE (addr
))
879 register rtx plus0
= XEXP (addr
, 0);
880 register rtx plus1
= XEXP (addr
, 1);
882 if (GET_CODE (plus0
) != REG
)
885 switch (GET_CODE (plus1
))
888 return SMALL_INT (plus1
) ? 1 : 2;
905 /* Function prologue/epilogue handlers. */
907 /* ARC stack frames look like:
909 Before call After call
910 +-----------------------+ +-----------------------+
912 high | local variables, | | local variables, |
913 mem | reg save area, etc. | | reg save area, etc. |
915 +-----------------------+ +-----------------------+
917 | arguments on stack. | | arguments on stack. |
919 SP+16->+-----------------------+FP+48->+-----------------------+
920 | 4 word save area for | | reg parm save area, |
921 | return addr, prev %fp | | only created for |
922 SP+0->+-----------------------+ | variable argument |
924 FP+16->+-----------------------+
925 | 4 word save area for |
926 | return addr, prev %fp |
927 FP+0->+-----------------------+
931 +-----------------------+
933 | register save area |
935 +-----------------------+
937 | alloca allocations |
939 +-----------------------+
941 | arguments on stack |
943 SP+16->+-----------------------+
944 low | 4 word save area for |
945 memory | return addr, prev %fp |
946 SP+0->+-----------------------+
949 1) The "reg parm save area" does not exist for non variable argument fns.
950 The "reg parm save area" can be eliminated completely if we created our
951 own va-arc.h, but that has tradeoffs as well (so it's not done). */
953 /* Structure to be filled in by arc_compute_frame_size with register
954 save masks, and offsets for the current function. */
955 struct arc_frame_info
957 unsigned int total_size
; /* # bytes that the entire frame takes up. */
958 unsigned int extra_size
; /* # bytes of extra stuff. */
959 unsigned int pretend_size
; /* # bytes we push and pretend caller did. */
960 unsigned int args_size
; /* # bytes that outgoing arguments take up. */
961 unsigned int reg_size
; /* # bytes needed to store regs. */
962 unsigned int var_size
; /* # bytes that variables take up. */
963 unsigned int reg_offset
; /* Offset from new sp to store regs. */
964 unsigned int gmask
; /* Mask of saved gp registers. */
965 int initialized
; /* Nonzero if frame size already calculated. */
968 /* Current frame information calculated by arc_compute_frame_size. */
969 static struct arc_frame_info current_frame_info
;
971 /* Zero structure to initialize current_frame_info. */
972 static struct arc_frame_info zero_frame_info
;
974 /* Type of function DECL.
976 The result is cached. To reset the cache at the end of a function,
977 call with DECL = NULL_TREE. */
979 enum arc_function_type
980 arc_compute_function_type (tree decl
)
984 static enum arc_function_type fn_type
= ARC_FUNCTION_UNKNOWN
;
985 /* Last function we were called for. */
986 static tree last_fn
= NULL_TREE
;
988 /* Resetting the cached value? */
989 if (decl
== NULL_TREE
)
991 fn_type
= ARC_FUNCTION_UNKNOWN
;
996 if (decl
== last_fn
&& fn_type
!= ARC_FUNCTION_UNKNOWN
)
999 /* Assume we have a normal function (not an interrupt handler). */
1000 fn_type
= ARC_FUNCTION_NORMAL
;
1002 /* Now see if this is an interrupt handler. */
1003 for (a
= DECL_ATTRIBUTES (current_function_decl
);
1007 tree name
= TREE_PURPOSE (a
), args
= TREE_VALUE (a
);
1009 if (name
== get_identifier ("__interrupt__")
1010 && list_length (args
) == 1
1011 && TREE_CODE (TREE_VALUE (args
)) == STRING_CST
)
1013 tree value
= TREE_VALUE (args
);
1015 if (!strcmp (TREE_STRING_POINTER (value
), "ilink1"))
1016 fn_type
= ARC_FUNCTION_ILINK1
;
1017 else if (!strcmp (TREE_STRING_POINTER (value
), "ilink2"))
1018 fn_type
= ARC_FUNCTION_ILINK2
;
1029 #define ILINK1_REGNUM 29
1030 #define ILINK2_REGNUM 30
1031 #define RETURN_ADDR_REGNUM 31
1032 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1033 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1035 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1036 The return address and frame pointer are treated separately.
1037 Don't consider them here. */
1038 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1039 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1040 && (regs_ever_live[regno] && (!call_used_regs[regno] || interrupt_p)))
1042 #define MUST_SAVE_RETURN_ADDR (regs_ever_live[RETURN_ADDR_REGNUM])
1044 /* Return the bytes needed to compute the frame pointer from the current
1047 SIZE is the size needed for local variables. */
1050 arc_compute_frame_size (int size
/* # of var. bytes allocated. */)
1053 unsigned int total_size
, var_size
, args_size
, pretend_size
, extra_size
;
1054 unsigned int reg_size
, reg_offset
;
1056 enum arc_function_type fn_type
;
1060 args_size
= current_function_outgoing_args_size
;
1061 pretend_size
= current_function_pretend_args_size
;
1062 extra_size
= FIRST_PARM_OFFSET (0);
1063 total_size
= extra_size
+ pretend_size
+ args_size
+ var_size
;
1064 reg_offset
= FIRST_PARM_OFFSET(0) + current_function_outgoing_args_size
;
1068 /* See if this is an interrupt handler. Call used registers must be saved
1070 fn_type
= arc_compute_function_type (current_function_decl
);
1071 interrupt_p
= ARC_INTERRUPT_P (fn_type
);
1073 /* Calculate space needed for registers.
1074 ??? We ignore the extension registers for now. */
1076 for (regno
= 0; regno
<= 31; regno
++)
1078 if (MUST_SAVE_REGISTER (regno
, interrupt_p
))
1080 reg_size
+= UNITS_PER_WORD
;
1081 gmask
|= 1 << regno
;
1085 total_size
+= reg_size
;
1087 /* If the only space to allocate is the fp/blink save area this is an
1088 empty frame. However, if we'll be making a function call we need to
1089 allocate a stack frame for our callee's fp/blink save area. */
1090 if (total_size
== extra_size
1091 && !MUST_SAVE_RETURN_ADDR
)
1092 total_size
= extra_size
= 0;
1094 total_size
= ARC_STACK_ALIGN (total_size
);
1096 /* Save computed information. */
1097 current_frame_info
.total_size
= total_size
;
1098 current_frame_info
.extra_size
= extra_size
;
1099 current_frame_info
.pretend_size
= pretend_size
;
1100 current_frame_info
.var_size
= var_size
;
1101 current_frame_info
.args_size
= args_size
;
1102 current_frame_info
.reg_size
= reg_size
;
1103 current_frame_info
.reg_offset
= reg_offset
;
1104 current_frame_info
.gmask
= gmask
;
1105 current_frame_info
.initialized
= reload_completed
;
1107 /* Ok, we're done. */
1111 /* Common code to save/restore registers. */
1114 arc_save_restore (FILE *file
,
1115 const char *base_reg
,
1116 unsigned int offset
,
1125 for (regno
= 0; regno
<= 31; regno
++)
1127 if ((gmask
& (1L << regno
)) != 0)
1129 fprintf (file
, "\t%s %s,[%s,%d]\n",
1130 op
, reg_names
[regno
], base_reg
, offset
);
1131 offset
+= UNITS_PER_WORD
;
1136 /* Target hook to assemble an integer object. The ARC version needs to
1137 emit a special directive for references to labels and function
1141 arc_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
1143 if (size
== UNITS_PER_WORD
&& aligned_p
1144 && ((GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_FUNCTION_P (x
))
1145 || GET_CODE (x
) == LABEL_REF
))
1147 fputs ("\t.word\t%st(", asm_out_file
);
1148 output_addr_const (asm_out_file
, x
);
1149 fputs (")\n", asm_out_file
);
1152 return default_assemble_integer (x
, size
, aligned_p
);
1155 /* Set up the stack and frame pointer (if desired) for the function. */
1158 arc_output_function_prologue (FILE *file
, HOST_WIDE_INT size
)
1160 const char *sp_str
= reg_names
[STACK_POINTER_REGNUM
];
1161 const char *fp_str
= reg_names
[FRAME_POINTER_REGNUM
];
1162 unsigned int gmask
= current_frame_info
.gmask
;
1163 enum arc_function_type fn_type
= arc_compute_function_type (current_function_decl
);
1165 /* If this is an interrupt handler, set up our stack frame.
1166 ??? Optimize later. */
1167 if (ARC_INTERRUPT_P (fn_type
))
1169 fprintf (file
, "\t%s interrupt handler\n",
1171 fprintf (file
, "\tsub %s,%s,16\n", sp_str
, sp_str
);
1174 /* This is only for the human reader. */
1175 fprintf (file
, "\t%s BEGIN PROLOGUE %s vars= %d, regs= %d, args= %d, extra= %d\n",
1176 ASM_COMMENT_START
, ASM_COMMENT_START
,
1177 current_frame_info
.var_size
,
1178 current_frame_info
.reg_size
/ 4,
1179 current_frame_info
.args_size
,
1180 current_frame_info
.extra_size
);
1182 size
= ARC_STACK_ALIGN (size
);
1183 size
= (! current_frame_info
.initialized
1184 ? arc_compute_frame_size (size
)
1185 : current_frame_info
.total_size
);
1187 /* These cases shouldn't happen. Catch them now. */
1188 if (size
== 0 && gmask
)
1191 /* Allocate space for register arguments if this is a variadic function. */
1192 if (current_frame_info
.pretend_size
!= 0)
1193 fprintf (file
, "\tsub %s,%s,%d\n",
1194 sp_str
, sp_str
, current_frame_info
.pretend_size
);
1196 /* The home-grown ABI says link register is saved first. */
1197 if (MUST_SAVE_RETURN_ADDR
)
1198 fprintf (file
, "\tst %s,[%s,%d]\n",
1199 reg_names
[RETURN_ADDR_REGNUM
], sp_str
, UNITS_PER_WORD
);
1201 /* Set up the previous frame pointer next (if we need to). */
1202 if (frame_pointer_needed
)
1204 fprintf (file
, "\tst %s,[%s]\n", fp_str
, sp_str
);
1205 fprintf (file
, "\tmov %s,%s\n", fp_str
, sp_str
);
1208 /* ??? We don't handle the case where the saved regs are more than 252
1209 bytes away from sp. This can be handled by decrementing sp once, saving
1210 the regs, and then decrementing it again. The epilogue doesn't have this
1211 problem as the `ld' insn takes reg+limm values (though it would be more
1212 efficient to avoid reg+limm). */
1214 /* Allocate the stack frame. */
1215 if (size
- current_frame_info
.pretend_size
> 0)
1216 fprintf (file
, "\tsub %s,%s," HOST_WIDE_INT_PRINT_DEC
"\n",
1217 sp_str
, sp_str
, size
- current_frame_info
.pretend_size
);
1219 /* Save any needed call-saved regs (and call-used if this is an
1220 interrupt handler). */
1221 arc_save_restore (file
, sp_str
, current_frame_info
.reg_offset
,
1222 /* The zeroing of these two bits is unnecessary,
1223 but leave this in for clarity. */
1224 gmask
& ~(FRAME_POINTER_MASK
| RETURN_ADDR_MASK
),
1227 fprintf (file
, "\t%s END PROLOGUE\n", ASM_COMMENT_START
);
/* NOTE(review): this region is a mangled extraction -- each statement is
   split across several lines, original line numbers are fused into the text,
   and some original lines (braces, a few statements such as the updates of
   `restored'/`fp_restored_p', and the `no_epilogue' path) are missing.
   The surviving text is preserved verbatim below; only comments are added.

   Purpose (from the visible code): emit assembly for the function epilogue --
   recompute the frame size, restore call-saved registers via
   arc_save_restore, reload the return address and frame pointer, pop the
   stack, emit the `j.d' return insn (choosing blink/ilink1/ilink2 by
   function type), fill the return's delay slot, and reset per-function
   frame state.  */
1230 /* Do any necessary cleanup after a function to restore stack, frame,
1234 arc_output_function_epilogue (FILE *file
, HOST_WIDE_INT size
)
1236 rtx epilogue_delay
= current_function_epilogue_delay_list
;
1237 int noepilogue
= FALSE
;
1238 enum arc_function_type fn_type
= arc_compute_function_type (current_function_decl
);
1240 /* This is only for the human reader. */
1241 fprintf (file
, "\t%s EPILOGUE\n", ASM_COMMENT_START
);
/* Recompute the total frame size, mirroring the prologue's computation.  */
1243 size
= ARC_STACK_ALIGN (size
);
1244 size
= (!current_frame_info
.initialized
1245 ? arc_compute_frame_size (size
)
1246 : current_frame_info
.total_size
);
1248 if (size
== 0 && epilogue_delay
== 0)
1250 rtx insn
= get_last_insn ();
1252 /* If the last insn was a BARRIER, we don't have to write any code
1253 because a jump (aka return) was put there. */
1254 if (GET_CODE (insn
) == NOTE
)
1255 insn
= prev_nonnote_insn (insn
);
1256 if (insn
&& GET_CODE (insn
) == BARRIER
)
1262 unsigned int pretend_size
= current_frame_info
.pretend_size
;
1263 unsigned int frame_size
= size
- pretend_size
;
1264 int restored
, fp_restored_p
;
1265 int can_trust_sp_p
= !current_function_calls_alloca
;
1266 const char *sp_str
= reg_names
[STACK_POINTER_REGNUM
];
1267 const char *fp_str
= reg_names
[FRAME_POINTER_REGNUM
];
1269 /* ??? There are lots of optimizations that can be done here.
1270 EG: Use fp to restore regs if it's closer.
1271 Maybe in time we'll do them all. For now, always restore regs from
1272 sp, but don't restore sp if we don't have to. */
1274 if (!can_trust_sp_p
)
1276 if (!frame_pointer_needed
)
1278 fprintf (file
,"\tsub %s,%s,%d\t\t%s sp not trusted here\n",
1279 sp_str
, fp_str
, frame_size
, ASM_COMMENT_START
);
1282 /* Restore any saved registers. */
1283 arc_save_restore (file
, sp_str
, current_frame_info
.reg_offset
,
1284 /* The zeroing of these two bits is unnecessary,
1285 but leave this in for clarity. */
1286 current_frame_info
.gmask
& ~(FRAME_POINTER_MASK
| RETURN_ADDR_MASK
),
1289 if (MUST_SAVE_RETURN_ADDR
)
1290 fprintf (file
, "\tld %s,[%s,%d]\n",
1291 reg_names
[RETURN_ADDR_REGNUM
],
1292 frame_pointer_needed
? fp_str
: sp_str
,
1293 UNITS_PER_WORD
+ (frame_pointer_needed
? 0 : frame_size
));
1295 /* Keep track of how much of the stack pointer we've restored.
1296 It makes the following a lot more readable. */
/* NOTE(review): the initializations of `restored'/`fp_restored_p' that
   the comment above refers to are among the missing lines.  */
1300 /* We try to emit the epilogue delay slot insn right after the load
1301 of the return address register so that it can execute with the
1302 stack intact. Secondly, loads are delayed. */
1303 /* ??? If stack intactness is important, always emit now. */
1304 if (MUST_SAVE_RETURN_ADDR
&& epilogue_delay
!= NULL_RTX
)
1306 final_scan_insn (XEXP (epilogue_delay
, 0), file
, 1, -2, 1, NULL
);
1307 epilogue_delay
= NULL_RTX
;
1310 if (frame_pointer_needed
)
1312 /* Try to restore the frame pointer in the delay slot. We can't,
1313 however, if any of these is true. */
1314 if (epilogue_delay
!= NULL_RTX
1315 || !SMALL_INT (frame_size
)
1317 || ARC_INTERRUPT_P (fn_type
))
1319 /* Note that we restore fp and sp here! */
1320 fprintf (file
, "\tld.a %s,[%s,%d]\n", fp_str
, sp_str
, frame_size
);
1321 restored
+= frame_size
;
1325 else if (!SMALL_INT (size
/* frame_size + pretend_size */)
1326 || ARC_INTERRUPT_P (fn_type
))
1328 fprintf (file
, "\tadd %s,%s,%d\n", sp_str
, sp_str
, frame_size
);
1329 restored
+= frame_size
;
1332 /* These must be done before the return insn because the delay slot
1333 does the final stack restore. */
1334 if (ARC_INTERRUPT_P (fn_type
))
1338 final_scan_insn (XEXP (epilogue_delay
, 0), file
, 1, -2, 1,
1343 /* Emit the return instruction. */
/* Index by function type: normal -> blink, interrupt -> ilink1/ilink2.
   (Slot 0 presumably unused -- TODO confirm against arc_function_type.)  */
1345 static const int regs
[4] = {
1346 0, RETURN_ADDR_REGNUM
, ILINK1_REGNUM
, ILINK2_REGNUM
1348 fprintf (file
, "\tj.d %s\n", reg_names
[regs
[fn_type
]]);
1351 /* If the only register saved is the return address, we need a
1352 nop, unless we have an instruction to put into it. Otherwise
1353 we don't since reloading multiple registers doesn't reference
1354 the register being loaded. */
1356 if (ARC_INTERRUPT_P (fn_type
))
1357 fprintf (file
, "\tadd %s,%s,16\n", sp_str
, sp_str
);
1358 else if (epilogue_delay
!= NULL_RTX
)
1360 if (frame_pointer_needed
&& !fp_restored_p
)
1362 if (restored
< size
)
1364 final_scan_insn (XEXP (epilogue_delay
, 0), file
, 1, -2, 1, NULL
);
1366 else if (frame_pointer_needed
&& !fp_restored_p
)
1368 if (!SMALL_INT (frame_size
))
1370 /* Note that we restore fp and sp here! */
1371 fprintf (file
, "\tld.a %s,[%s,%d]\n", fp_str
, sp_str
, frame_size
);
1373 else if (restored
< size
)
1375 if (!SMALL_INT (size
- restored
))
1377 fprintf (file
, "\tadd %s,%s," HOST_WIDE_INT_PRINT_DEC
"\n",
1378 sp_str
, sp_str
, size
- restored
);
1381 fprintf (file
, "\tnop\n");
1384 /* Reset state info for each function. */
1385 current_frame_info
= zero_frame_info
;
1386 arc_compute_function_type (NULL_TREE
);
1389 /* Define the number of delay slots needed for the function epilogue.
1391 Interrupt handlers can't have any epilogue delay slots (it's always needed
1392 for something else, I think). For normal functions, we have to worry about
1393 using call-saved regs as they'll be restored before the delay slot insn.
1394 Functions with non-empty frames already have enough choices for the epilogue
1395 delay slot so for now we only consider functions with empty frames. */
1398 arc_delay_slots_for_epilogue (void)
1400 if (arc_compute_function_type (current_function_decl
) != ARC_FUNCTION_NORMAL
)
1402 if (!current_frame_info
.initialized
)
1403 (void) arc_compute_frame_size (get_frame_size ());
1404 if (current_frame_info
.total_size
== 0)
1409 /* Return true if TRIAL is a valid insn for the epilogue delay slot.
1410 Any single length instruction which doesn't reference the stack or frame
1411 pointer or any call-saved register is OK. SLOT will always be 0. */
1414 arc_eligible_for_epilogue_delay (rtx trial
, int slot
)
1419 if (get_attr_length (trial
) == 1
1420 /* If registers where saved, presumably there's more than enough
1421 possibilities for the delay slot. The alternative is something
1422 more complicated (of course, if we expanded the epilogue as rtl
1423 this problem would go away). */
1424 /* ??? Note that this will always be true since only functions with
1425 empty frames have epilogue delay slots. See
1426 arc_delay_slots_for_epilogue. */
1427 && current_frame_info
.gmask
== 0
1428 && ! reg_mentioned_p (stack_pointer_rtx
, PATTERN (trial
))
1429 && ! reg_mentioned_p (frame_pointer_rtx
, PATTERN (trial
)))
/* NOTE(review): mangled extraction -- the body of this function is missing
   from this chunk; only the comment and signature survive.  Presumably the
   body is empty or aborts (PIC unsupported on this port) -- TODO confirm
   against the original arc.c.  */
1436 /* Emit special PIC prologues and epilogues. */
1439 arc_finalize_pic (void)
/* NOTE(review): mangled extraction -- the case labels and return statements
   of this switch are missing from this chunk.  Presumably the shift codes
   (ASHIFT/ASHIFTRT/LSHIFTRT) return nonzero and everything else returns
   zero, per the comment -- TODO confirm against the original.  */
1444 /* Return true if OP is a shift operator. */
1447 shift_operator (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1449 switch (GET_CODE (op
))
/* NOTE(review): mangled extraction -- statements are split across lines and
   several original lines (braces, the switch head, some conditions, the
   loop-count arithmetic) are missing.  Preserved verbatim; comments only.

   Purpose (from the visible code): emit ARC assembly for a shift insn using
   single-bit shift instructions -- inline repeats of `asl'/`asr'/`lsr' for
   small constant counts, a rotate/and trick for count == BITS_PER_WORD - 1,
   and otherwise a hardware loop driven through lp_count / lp_start /
   lp_end (or an explicit sub.f/bn.nd loop).  */
1460 /* Output the assembler code for doing a shift.
1461 We go to a bit of trouble to generate efficient code as the ARC only has
1462 single bit shifts. This is taken from the h8300 port. We only have one
1463 mode of shifting and can't access individual bytes like the h8300 can, so
1464 this is greatly simplified (at the expense of not generating hyper-
1467 This function is not used if the variable shift insns are present. */
1469 /* ??? We assume the output operand is the same as operand 1.
1470 This can be optimized (deleted) in the case of 1 bit shifts. */
1471 /* ??? We use the loop register here. We don't use it elsewhere (yet) and
1472 using it here will give us a chance to play with it. */
1475 output_shift (rtx
*operands
)
1477 rtx shift
= operands
[3];
1478 enum machine_mode mode
= GET_MODE (shift
);
1479 enum rtx_code code
= GET_CODE (shift
);
1480 const char *shift_one
;
/* Select the single-bit shift template for the rtx code.  */
1487 case ASHIFT
: shift_one
= "asl %0,%0"; break;
1488 case ASHIFTRT
: shift_one
= "asr %0,%0"; break;
1489 case LSHIFTRT
: shift_one
= "lsr %0,%0"; break;
/* Variable shift count: load it into the loop counter.  */
1493 if (GET_CODE (operands
[2]) != CONST_INT
)
1496 output_asm_insn ("mov lp_count,%2", operands
);
1498 output_asm_insn ("mov %4,%2", operands
);
1503 int n
= INTVAL (operands
[2]);
1505 /* If the count is negative, make it 0. */
1508 /* If the count is too big, truncate it.
1509 ANSI says shifts of GET_MODE_BITSIZE are undefined - we choose to
1510 do the intuitive thing. */
1511 else if (n
> GET_MODE_BITSIZE (mode
))
1512 n
= GET_MODE_BITSIZE (mode
);
1514 /* First see if we can do them inline. */
1518 output_asm_insn (shift_one
, operands
);
1520 /* See if we can use a rotate/and. */
1521 else if (n
== BITS_PER_WORD
- 1)
1526 output_asm_insn ("and %0,%0,1\n\tror %0,%0", operands
);
1529 /* The ARC doesn't have a rol insn. Use something else. */
1530 output_asm_insn ("asl.f 0,%0\n\tsbc %0,0,0", operands
);
1533 /* The ARC doesn't have a rol insn. Use something else. */
1534 output_asm_insn ("asl.f 0,%0\n\tadc %0,0,0", operands
);
/* Constant count too large to do inline: emit a loop.  */
1546 output_asm_insn ("mov lp_count,%c2", operands
);
1548 output_asm_insn ("mov %4,%c2", operands
);
1553 sprintf (buf
, "lr %%4,[status]\n\tadd %%4,%%4,6\t%s single insn loop start",
1556 sprintf (buf
, "mov %%4,%%%%st(1f)\t%s (single insn loop start) >> 2",
1558 output_asm_insn (buf
, operands
);
1559 output_asm_insn ("sr %4,[lp_start]", operands
);
1560 output_asm_insn ("add %4,%4,1", operands
);
1561 output_asm_insn ("sr %4,[lp_end]", operands
);
1562 output_asm_insn ("nop\n\tnop", operands
);
1564 fprintf (asm_out_file
, "\t%s single insn loop\n",
1567 fprintf (asm_out_file
, "1:\t%s single insn loop\n",
1569 output_asm_insn (shift_one
, operands
);
/* Fallback explicit loop: decrement, test, branch.  */
1573 fprintf (asm_out_file
, "1:\t%s begin shift loop\n",
1575 output_asm_insn ("sub.f %4,%4,1", operands
);
1576 output_asm_insn ("nop", operands
);
1577 output_asm_insn ("bn.nd 2f", operands
);
1578 output_asm_insn (shift_one
, operands
);
1579 output_asm_insn ("b.nd 1b", operands
);
1580 fprintf (asm_out_file
, "2:\t%s end shift loop\n",
1589 /* Nested function support. */
1591 /* Emit RTL insns to initialize the variable parts of a trampoline.
1592 FNADDR is an RTX for the address of the function's pure code.
1593 CXT is an RTX for the static chain value for the function. */
1596 arc_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED
,
1597 rtx fnaddr ATTRIBUTE_UNUSED
,
1598 rtx cxt ATTRIBUTE_UNUSED
)
1602 /* Set the cpu type and print out other fancy things,
1603 at the top of the file. */
1606 arc_file_start (void)
1608 default_file_start ();
1609 fprintf (asm_out_file
, "\t.cpu %s\n", arc_cpu_string
);
/* NOTE(review): mangled extraction -- the outer switch head, several case
   labels, `break's and braces are missing from this chunk.  Preserved
   verbatim; comments only.

   Purpose (from the visible code): PRINT_OPERAND implementation.  Visible
   code letters include: delay-slot suffix emission (".nd"/".jd"),
   '?'/'!' (conditional-execution suffix driven by the ccfsm state),
   a nop-before-branch guard using last_insn_set_cc_p, condition-code
   names (direct and inverted), R (second word of a DImode/DFmode ref),
   symbol/label "%st(...)" wrapping, H/L (most/least significant word),
   a CONST_DOUBLE decimal printer, U (load/store update indicator),
   V (cache bypass ".di" for volatile MEMs), plus a default path that
   prints REG/MEM/CONST_DOUBLE/other via output_addr_const.  */
1612 /* Print operand X (an rtx) in assembler syntax to file FILE.
1613 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
1614 For `%' followed by punctuation, CODE is the punctuation and X is null. */
1617 arc_print_operand (FILE *file
, rtx x
, int code
)
1622 /* Conditional branches. For now these are equivalent. */
1624 /* Unconditional branches. Output the appropriate delay slot suffix. */
1625 if (!final_sequence
|| XVECLEN (final_sequence
, 0) == 1)
1627 /* There's nothing in the delay slot. */
1628 fputs (".nd", file
);
1632 rtx jump
= XVECEXP (final_sequence
, 0, 0);
1633 rtx delay
= XVECEXP (final_sequence
, 0, 1);
1634 if (INSN_ANNULLED_BRANCH_P (jump
))
1635 fputs (INSN_FROM_TARGET_P (delay
) ? ".jd" : ".nd", file
);
1640 case '?' : /* with leading "." */
1641 case '!' : /* without leading "." */
1642 /* This insn can be conditionally executed. See if the ccfsm machinery
1643 says it should be conditionalized. */
1644 if (arc_ccfsm_state
== 3 || arc_ccfsm_state
== 4)
1646 /* Is this insn in a delay slot? */
1647 if (final_sequence
&& XVECLEN (final_sequence
, 0) == 2)
1649 rtx insn
= XVECEXP (final_sequence
, 0, 1);
1651 /* If the insn is annulled and is from the target path, we need
1652 to inverse the condition test. */
1653 if (INSN_ANNULLED_BRANCH_P (insn
))
1655 if (INSN_FROM_TARGET_P (insn
))
1656 fprintf (file
, "%s%s",
1657 code
== '?' ? "." : "",
1658 arc_condition_codes
[ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc
)]);
1660 fprintf (file
, "%s%s",
1661 code
== '?' ? "." : "",
1662 arc_condition_codes
[arc_ccfsm_current_cc
]);
1666 /* This insn is executed for either path, so don't
1667 conditionalize it at all. */
1668 ; /* nothing to do */
1673 /* This insn isn't in a delay slot. */
1674 fprintf (file
, "%s%s",
1675 code
== '?' ? "." : "",
1676 arc_condition_codes
[arc_ccfsm_current_cc
]);
1681 /* Output a nop if we're between a set of the condition codes,
1682 and a conditional branch. */
1683 if (last_insn_set_cc_p
)
1684 fputs ("nop\n\t", file
);
1687 fputs (arc_condition_codes
[get_arc_condition_code (x
)], file
);
1690 fputs (arc_condition_codes
[ARC_INVERSE_CONDITION_CODE
1691 (get_arc_condition_code (x
))],
1695 /* Write second word of DImode or DFmode reference,
1696 register or memory. */
1697 if (GET_CODE (x
) == REG
)
1698 fputs (reg_names
[REGNO (x
)+1], file
);
1699 else if (GET_CODE (x
) == MEM
)
1702 /* Handle possible auto-increment. Since it is pre-increment and
1703 we have already done it, we can just use an offset of four. */
1704 /* ??? This is taken from rs6000.c I think. I don't think it is
1705 currently necessary, but keep it around. */
1706 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
1707 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
1708 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 4));
1710 output_address (plus_constant (XEXP (x
, 0), 4));
1714 output_operand_lossage ("invalid operand to %%R code");
1717 if ((GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_FUNCTION_P (x
))
1718 || GET_CODE (x
) == LABEL_REF
)
1720 fprintf (file
, "%%st(");
1721 output_addr_const (file
, x
);
1722 fprintf (file
, ")");
1728 if (GET_CODE (x
) == REG
)
1730 /* L = least significant word, H = most significant word */
1731 if ((TARGET_BIG_ENDIAN
!= 0) ^ (code
== 'L'))
1732 fputs (reg_names
[REGNO (x
)], file
);
1734 fputs (reg_names
[REGNO (x
)+1], file
);
1736 else if (GET_CODE (x
) == CONST_INT
1737 || GET_CODE (x
) == CONST_DOUBLE
)
1741 split_double (x
, &first
, &second
);
1742 fprintf (file
, "0x%08lx",
1743 (long)(code
== 'L' ? INTVAL (first
) : INTVAL (second
)));
1746 output_operand_lossage ("invalid operand to %%H/%%L code");
1752 if (GET_CODE (x
) != CONST_DOUBLE
1753 || GET_MODE_CLASS (GET_MODE (x
)) != MODE_FLOAT
)
1756 real_to_decimal (str
, CONST_DOUBLE_REAL_VALUE (x
), sizeof (str
), 0, 1);
1757 fprintf (file
, "%s", str
);
1761 /* Output a load/store with update indicator if appropriate. */
1762 if (GET_CODE (x
) == MEM
)
1764 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
1765 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
1769 output_operand_lossage ("invalid operand to %%U code");
1772 /* Output cache bypass indicator for a load/store insn. Volatile memory
1773 refs are defined to use the cache bypass mechanism. */
1774 if (GET_CODE (x
) == MEM
)
1776 if (MEM_VOLATILE_P (x
))
1777 fputs (".di", file
);
1780 output_operand_lossage ("invalid operand to %%V code");
1783 /* Do nothing special. */
1787 output_operand_lossage ("invalid operand output code");
/* No code letter: print the operand itself by rtx code.  */
1790 switch (GET_CODE (x
))
1793 fputs (reg_names
[REGNO (x
)], file
);
1797 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
)
1798 output_address (plus_constant (XEXP (XEXP (x
, 0), 0),
1799 GET_MODE_SIZE (GET_MODE (x
))));
1800 else if (GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
1801 output_address (plus_constant (XEXP (XEXP (x
, 0), 0),
1802 - GET_MODE_SIZE (GET_MODE (x
))));
1804 output_address (XEXP (x
, 0));
1808 /* We handle SFmode constants here as output_addr_const doesn't. */
1809 if (GET_MODE (x
) == SFmode
)
1814 REAL_VALUE_FROM_CONST_DOUBLE (d
, x
);
1815 REAL_VALUE_TO_TARGET_SINGLE (d
, l
);
1816 fprintf (file
, "0x%08lx", l
);
1819 /* Fall through. Let output_addr_const deal with it. */
1821 output_addr_const (file
, x
);
/* NOTE(review): mangled extraction -- case labels (REG, SYMBOL_REF,
   LABEL_REF, PLUS, auto-inc, default -- presumably), `break's and braces
   are missing from this chunk.  Preserved verbatim; comments only.

   Purpose (from the visible code): PRINT_OPERAND_ADDRESS implementation --
   registers print their name, function symbols print as "%st(sym)",
   PLUS addresses split into base register plus a constant offset, index
   register, or symbolic index.  */
1826 /* Print a memory address as an operand to reference that memory location. */
1829 arc_print_operand_address (FILE *file
, rtx addr
)
1831 register rtx base
, index
= 0;
1834 switch (GET_CODE (addr
))
1837 fputs (reg_names
[REGNO (addr
)], file
);
1840 if (/*???*/ 0 && SYMBOL_REF_FUNCTION_P (addr
))
1842 fprintf (file
, "%%st(");
1843 output_addr_const (file
, addr
);
1844 fprintf (file
, ")");
1847 output_addr_const (file
, addr
);
/* PLUS: classify operands into base register and offset/index.  */
1850 if (GET_CODE (XEXP (addr
, 0)) == CONST_INT
)
1851 offset
= INTVAL (XEXP (addr
, 0)), base
= XEXP (addr
, 1);
1852 else if (GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
1853 offset
= INTVAL (XEXP (addr
, 1)), base
= XEXP (addr
, 0);
1855 base
= XEXP (addr
, 0), index
= XEXP (addr
, 1);
1856 if (GET_CODE (base
) != REG
)
1858 fputs (reg_names
[REGNO (base
)], file
);
1862 fprintf (file
, ",%d", offset
);
1864 else if (GET_CODE (index
) == REG
)
1865 fprintf (file
, ",%s", reg_names
[REGNO (index
)]);
1866 else if (GET_CODE (index
) == SYMBOL_REF
)
1867 fputc (',', file
), output_addr_const (file
, index
);
1873 /* We shouldn't get here as we've lost the mode of the memory object
1874 (which says how much to inc/dec by. */
1878 output_addr_const (file
, addr
);
/* NOTE(review): mangled extraction -- the case labels of this switch
   (presumably the COND_SET* / COND_CANUSE attribute values) and the braces
   are missing from this chunk.  Preserved verbatim; comments only.

   Purpose (from the visible code): shift the "does this insn set cc"
   tracking state: remember whether the previous insn set cc, then record
   whether the current one does, based on its `cond' attribute and whether
   it is a one-word insn.  Consumed by the '#'-style nop emission in
   arc_print_operand via last_insn_set_cc_p.  */
1883 /* Update compare/branch separation marker. */
1886 record_cc_ref (rtx insn
)
1888 last_insn_set_cc_p
= current_insn_set_cc_p
;
1890 switch (get_attr_cond (insn
))
1895 if (get_attr_length (insn
) == 1)
1896 current_insn_set_cc_p
= 1;
1898 current_insn_set_cc_p
= 0;
1901 current_insn_set_cc_p
= 0;
/* NOTE(review): mangled extraction -- this large function is missing many
   original lines (braces, some declarations such as `reverse'/`scanbody',
   several case labels of the inner switch, the `break'/`fail = TRUE'
   statements, and parts of the success bookkeeping).  The surviving text
   is preserved verbatim; only comments are added.  The state-machine
   comment block below documents the intended behavior.  */
1906 /* Conditional execution support.
1908 This is based on the ARM port but for now is much simpler.
1910 A finite state machine takes care of noticing whether or not instructions
1911 can be conditionally executed, and thus decrease execution time and code
1912 size by deleting branch instructions. The fsm is controlled by
1913 final_prescan_insn, and controls the actions of PRINT_OPERAND. The patterns
1914 in the .md file for the branch insns also have a hand in this. */
1916 /* The state of the fsm controlling condition codes are:
1917 0: normal, do nothing special
1918 1: don't output this insn
1919 2: don't output this insn
1920 3: make insns conditional
1921 4: make insns conditional
1923 State transitions (state->state by whom, under what condition):
1924 0 -> 1 final_prescan_insn, if insn is conditional branch
1925 0 -> 2 final_prescan_insn, if the `target' is an unconditional branch
1926 1 -> 3 branch patterns, after having not output the conditional branch
1927 2 -> 4 branch patterns, after having not output the conditional branch
1928 3 -> 0 (*targetm.asm_out.internal_label), if the `target' label is reached
1929 (the target label has CODE_LABEL_NUMBER equal to
1930 arc_ccfsm_target_label).
1931 4 -> 0 final_prescan_insn, if `target' unconditional branch is reached
1933 If the jump clobbers the conditions then we use states 2 and 4.
1935 A similar thing can be done with conditional return insns.
1937 We also handle separating branches from sets of the condition code.
1938 This is done here because knowledge of the ccfsm state is required,
1939 we may not be outputting the branch. */
1942 arc_final_prescan_insn (rtx insn
,
1943 rtx
*opvec ATTRIBUTE_UNUSED
,
1944 int noperands ATTRIBUTE_UNUSED
)
1946 /* BODY will hold the body of INSN. */
1947 register rtx body
= PATTERN (insn
);
1949 /* This will be 1 if trying to repeat the trick (ie: do the `else' part of
1950 an if/then/else), and things need to be reversed. */
/* NOTE(review): the declaration of `reverse' itself is among the missing
   lines.  */
1953 /* If we start with a return insn, we only succeed if we find another one. */
1954 int seeking_return
= 0;
1956 /* START_INSN will hold the insn from where we start looking. This is the
1957 first insn after the following code_label if REVERSE is true. */
1958 rtx start_insn
= insn
;
1960 /* Update compare/branch separation marker. */
1961 record_cc_ref (insn
);
1963 /* Allow -mdebug-ccfsm to turn this off so we can see how well it does.
1964 We can't do this in macro FINAL_PRESCAN_INSN because its called from
1965 final_scan_insn which has `optimize' as a local. */
1966 if (optimize
< 2 || TARGET_NO_COND_EXEC
)
1969 /* If in state 4, check if the target branch is reached, in order to
1970 change back to state 0. */
1971 if (arc_ccfsm_state
== 4)
1973 if (insn
== arc_ccfsm_target_insn
)
1975 arc_ccfsm_target_insn
= NULL
;
1976 arc_ccfsm_state
= 0;
1981 /* If in state 3, it is possible to repeat the trick, if this insn is an
1982 unconditional branch to a label, and immediately following this branch
1983 is the previous target label which is only used once, and the label this
1984 branch jumps to is not too far off. Or in other words "we've done the
1985 `then' part, see if we can do the `else' part." */
1986 if (arc_ccfsm_state
== 3)
1988 if (simplejump_p (insn
))
1990 start_insn
= next_nonnote_insn (start_insn
);
1991 if (GET_CODE (start_insn
) == BARRIER
)
1993 /* ??? Isn't this always a barrier? */
1994 start_insn
= next_nonnote_insn (start_insn
);
1996 if (GET_CODE (start_insn
) == CODE_LABEL
1997 && CODE_LABEL_NUMBER (start_insn
) == arc_ccfsm_target_label
1998 && LABEL_NUSES (start_insn
) == 1)
2003 else if (GET_CODE (body
) == RETURN
)
2005 start_insn
= next_nonnote_insn (start_insn
);
2006 if (GET_CODE (start_insn
) == BARRIER
)
2007 start_insn
= next_nonnote_insn (start_insn
);
2008 if (GET_CODE (start_insn
) == CODE_LABEL
2009 && CODE_LABEL_NUMBER (start_insn
) == arc_ccfsm_target_label
2010 && LABEL_NUSES (start_insn
) == 1)
2022 if (GET_CODE (insn
) != JUMP_INSN
)
2025 /* This jump might be paralleled with a clobber of the condition codes,
2026 the jump should always come first. */
2027 if (GET_CODE (body
) == PARALLEL
&& XVECLEN (body
, 0) > 0)
2028 body
= XVECEXP (body
, 0, 0);
2031 || (GET_CODE (body
) == SET
&& GET_CODE (SET_DEST (body
)) == PC
2032 && GET_CODE (SET_SRC (body
)) == IF_THEN_ELSE
))
2034 int insns_skipped
= 0, fail
= FALSE
, succeed
= FALSE
;
2035 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
2036 int then_not_else
= TRUE
;
2037 /* Nonzero if next insn must be the target label. */
2038 int next_must_be_target_label_p
;
2039 rtx this_insn
= start_insn
, label
= 0;
2041 /* Register the insn jumped to. */
2044 if (!seeking_return
)
2045 label
= XEXP (SET_SRC (body
), 0);
2047 else if (GET_CODE (XEXP (SET_SRC (body
), 1)) == LABEL_REF
)
2048 label
= XEXP (XEXP (SET_SRC (body
), 1), 0);
2049 else if (GET_CODE (XEXP (SET_SRC (body
), 2)) == LABEL_REF
)
2051 label
= XEXP (XEXP (SET_SRC (body
), 2), 0);
2052 then_not_else
= FALSE
;
2054 else if (GET_CODE (XEXP (SET_SRC (body
), 1)) == RETURN
)
2056 else if (GET_CODE (XEXP (SET_SRC (body
), 2)) == RETURN
)
2059 then_not_else
= FALSE
;
2064 /* See how many insns this branch skips, and what kind of insns. If all
2065 insns are okay, and the label or unconditional branch to the same
2066 label is not too far away, succeed. */
2067 for (insns_skipped
= 0, next_must_be_target_label_p
= FALSE
;
2068 !fail
&& !succeed
&& insns_skipped
< MAX_INSNS_SKIPPED
;
2073 this_insn
= next_nonnote_insn (this_insn
);
2077 if (next_must_be_target_label_p
)
2079 if (GET_CODE (this_insn
) == BARRIER
)
2081 if (GET_CODE (this_insn
) == CODE_LABEL
2082 && this_insn
== label
)
2084 arc_ccfsm_state
= 1;
2092 scanbody
= PATTERN (this_insn
);
/* Classify each scanned insn: labels, barriers, calls, jumps, and
   ordinary insns each constrain whether the skip can succeed.  */
2094 switch (GET_CODE (this_insn
))
2097 /* Succeed if it is the target label, otherwise fail since
2098 control falls in from somewhere else. */
2099 if (this_insn
== label
)
2101 arc_ccfsm_state
= 1;
2109 /* Succeed if the following insn is the target label.
2111 If return insns are used then the last insn in a function
2112 will be a barrier. */
2113 next_must_be_target_label_p
= TRUE
;
2117 /* Can handle a call insn if there are no insns after it.
2118 IE: The next "insn" is the target label. We don't have to
2119 worry about delay slots as such insns are SEQUENCE's inside
2120 INSN's. ??? It is possible to handle such insns though. */
2121 if (get_attr_cond (this_insn
) == COND_CANUSE
)
2122 next_must_be_target_label_p
= TRUE
;
2128 /* If this is an unconditional branch to the same label, succeed.
2129 If it is to another label, do nothing. If it is conditional,
2131 /* ??? Probably, the test for the SET and the PC are unnecessary. */
2133 if (GET_CODE (scanbody
) == SET
2134 && GET_CODE (SET_DEST (scanbody
)) == PC
)
2136 if (GET_CODE (SET_SRC (scanbody
)) == LABEL_REF
2137 && XEXP (SET_SRC (scanbody
), 0) == label
&& !reverse
)
2139 arc_ccfsm_state
= 2;
2142 else if (GET_CODE (SET_SRC (scanbody
)) == IF_THEN_ELSE
)
2145 else if (GET_CODE (scanbody
) == RETURN
2148 arc_ccfsm_state
= 2;
2151 else if (GET_CODE (scanbody
) == PARALLEL
)
2153 if (get_attr_cond (this_insn
) != COND_CANUSE
)
2159 /* We can only do this with insns that can use the condition
2160 codes (and don't set them). */
2161 if (GET_CODE (scanbody
) == SET
2162 || GET_CODE (scanbody
) == PARALLEL
)
2164 if (get_attr_cond (this_insn
) != COND_CANUSE
)
2167 /* We can't handle other insns like sequences. */
/* Success bookkeeping: record the target label or target insn.  */
2179 if ((!seeking_return
) && (arc_ccfsm_state
== 1 || reverse
))
2180 arc_ccfsm_target_label
= CODE_LABEL_NUMBER (label
);
2181 else if (seeking_return
|| arc_ccfsm_state
== 2)
2183 while (this_insn
&& GET_CODE (PATTERN (this_insn
)) == USE
)
2185 this_insn
= next_nonnote_insn (this_insn
);
2186 if (this_insn
&& (GET_CODE (this_insn
) == BARRIER
2187 || GET_CODE (this_insn
) == CODE_LABEL
))
2192 /* Oh dear! we ran off the end, give up. */
2193 extract_insn_cached (insn
);
2194 arc_ccfsm_state
= 0;
2195 arc_ccfsm_target_insn
= NULL
;
2198 arc_ccfsm_target_insn
= this_insn
;
2203 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
2206 arc_ccfsm_current_cc
= get_arc_condition_code (XEXP (SET_SRC (body
),
2209 if (reverse
|| then_not_else
)
2210 arc_ccfsm_current_cc
= ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc
);
2213 /* Restore recog_data. Getting the attributes of other insns can
2214 destroy this array, but final.c assumes that it remains intact
2215 across this call. */
2216 extract_insn_cached (insn
);
2220 /* Record that we are currently outputting label NUM with prefix PREFIX.
2221 It it's the label we're looking for, reset the ccfsm machinery.
2223 Called from (*targetm.asm_out.internal_label). */
2226 arc_ccfsm_at_label (const char *prefix
, int num
)
2228 if (arc_ccfsm_state
== 3 && arc_ccfsm_target_label
== num
2229 && !strcmp (prefix
, "L"))
2231 arc_ccfsm_state
= 0;
2232 arc_ccfsm_target_insn
= NULL_RTX
;
2236 /* See if the current insn, which is a conditional branch, is to be
2240 arc_ccfsm_branch_deleted_p (void)
2242 if (arc_ccfsm_state
== 1 || arc_ccfsm_state
== 2)
2247 /* Record a branch isn't output because subsequent insns can be
2251 arc_ccfsm_record_branch_deleted (void)
2253 /* Indicate we're conditionalizing insns now. */
2254 arc_ccfsm_state
+= 2;
2256 /* If the next insn is a subroutine call, we still need a nop between the
2257 cc setter and user. We need to undo the effect of calling record_cc_ref
2258 for the just deleted branch. */
2259 current_insn_set_cc_p
= last_insn_set_cc_p
;
2263 arc_va_start (tree valist
, rtx nextarg
)
2265 /* See arc_setup_incoming_varargs for reasons for this oddity. */
2266 if (current_function_args_info
< 8
2267 && (current_function_args_info
& 1))
2268 nextarg
= plus_constant (nextarg
, UNITS_PER_WORD
);
2270 std_expand_builtin_va_start (valist
, nextarg
);
/* NOTE(review): mangled extraction -- missing lines include the return
   type, the declarations of `addr'/`incr'/`addr_rtx'/`adj', braces, and
   the big-endian `else' arm.  Preserved verbatim; comments only.

   Purpose (from the visible code): old-style EXPAND_BUILTIN_VA_ARG
   implementation.  Aggregates and scalars larger than 8 bytes are fetched
   by reference (the va_list slot holds a pointer); otherwise the rounded
   size is computed, 8-byte-aligned operands align the pointer to 8, a
   big-endian adjustment is applied, and va_list is advanced by the rounded
   size as a side effect.  Presumably returns `addr_rtx' -- the return
   statement is among the missing lines.  */
2274 arc_va_arg (tree valist
, tree type
)
2278 tree type_ptr
= build_pointer_type (type
);
2280 /* All aggregates are passed by reference. All scalar types larger
2281 than 8 bytes are passed by reference. */
2283 if (AGGREGATE_TYPE_P (type
) || int_size_in_bytes (type
) > 8)
2285 tree type_ptr_ptr
= build_pointer_type (type_ptr
);
2287 addr
= build (INDIRECT_REF
, type_ptr
,
2288 build (NOP_EXPR
, type_ptr_ptr
, valist
));
2290 incr
= build (PLUS_EXPR
, TREE_TYPE (valist
),
2291 valist
, build_int_2 (UNITS_PER_WORD
, 0));
2295 HOST_WIDE_INT align
, rounded_size
;
2297 /* Compute the rounded size of the type. */
2298 align
= PARM_BOUNDARY
/ BITS_PER_UNIT
;
2299 rounded_size
= (((TREE_INT_CST_LOW (TYPE_SIZE (type
)) / BITS_PER_UNIT
2300 + align
- 1) / align
) * align
);
2302 /* Align 8 byte operands. */
2304 if (TYPE_ALIGN (type
) > BITS_PER_WORD
)
2306 /* AP = (TYPE *)(((int)AP + 7) & -8) */
2308 addr
= build (NOP_EXPR
, integer_type_node
, valist
);
2309 addr
= fold (build (PLUS_EXPR
, integer_type_node
, addr
,
2310 build_int_2 (7, 0)));
2311 addr
= fold (build (BIT_AND_EXPR
, integer_type_node
, addr
,
2312 build_int_2 (-8, 0)));
2313 addr
= fold (build (NOP_EXPR
, TREE_TYPE (valist
), addr
));
2316 /* The increment is always rounded_size past the aligned pointer. */
2317 incr
= fold (build (PLUS_EXPR
, TREE_TYPE (addr
), addr
,
2318 build_int_2 (rounded_size
, 0)));
2320 /* Adjust the pointer in big-endian mode. */
2321 if (BYTES_BIG_ENDIAN
)
2324 adj
= TREE_INT_CST_LOW (TYPE_SIZE (type
)) / BITS_PER_UNIT
;
2325 if (rounded_size
> align
)
2328 addr
= fold (build (PLUS_EXPR
, TREE_TYPE (addr
), addr
,
2329 build_int_2 (rounded_size
- adj
, 0)));
2333 /* Evaluate the data address. */
2334 addr_rtx
= expand_expr (addr
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
2335 addr_rtx
= copy_to_reg (addr_rtx
);
2337 /* Compute new value for AP. */
2338 incr
= build (MODIFY_EXPR
, TREE_TYPE (valist
), valist
, incr
);
2339 TREE_SIDE_EFFECTS (incr
) = 1;
2340 expand_expr (incr
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2345 /* This is how to output a definition of an internal numbered label where
2346 PREFIX is the class of label and NUM is the number within the class. */
2349 arc_internal_label (FILE *stream
, const char *prefix
, unsigned long labelno
)
2351 arc_ccfsm_at_label (prefix
, labelno
);
2352 default_internal_label (stream
, prefix
, labelno
);