/* Subroutines used for code generation on the Argonaut ARC cpu.
   Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* ??? This is an old port, and is undoubtedly suffering from bit rot.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "recog.h"
#include "toplev.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"

/* Which cpu we're compiling for (NULL(=base), ???).  */
const char *arc_cpu_string;

/* Name of mangle string to add to symbols to separate code compiled for each
   cpu (or NULL).  */
const char *arc_mangle_cpu;

/* Save the operands last given to a compare for use when we
   generate a scc or bcc insn.  */
rtx arc_compare_op0, arc_compare_op1;

/* Name of text, data, and rodata sections, as specified on command line.
   Selected by -m{text,data,rodata} flags.  */
const char *arc_text_string = ARC_DEFAULT_TEXT_SECTION;
const char *arc_data_string = ARC_DEFAULT_DATA_SECTION;
const char *arc_rodata_string = ARC_DEFAULT_RODATA_SECTION;

/* Name of text, data, and rodata sections used in varasm.c.  */
const char *arc_text_section;
const char *arc_data_section;
const char *arc_rodata_section;

/* Array of valid operand punctuation characters.  */
char arc_punct_chars[256];

/* Variables used by arc_final_prescan_insn to implement conditional
   execution.  */
static int arc_ccfsm_state;
static int arc_ccfsm_current_cc;
static rtx arc_ccfsm_target_insn;
static int arc_ccfsm_target_label;

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
#define MAX_INSNS_SKIPPED 3

/* A nop is needed between a 4 byte insn that sets the condition codes and
   a branch that uses them (the same isn't true for an 8 byte insn that sets
   the condition codes).  Set by arc_final_prescan_insn.  Used by
   arc_print_operand.  */
static int last_insn_set_cc_p;
static int current_insn_set_cc_p;

static void record_cc_ref (rtx);
static void arc_init_reg_tables (void);
static int get_arc_condition_code (rtx);
const struct attribute_spec arc_attribute_table[];
static tree arc_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
static bool arc_assemble_integer (rtx, unsigned int, int);
static void arc_output_function_prologue (FILE *, HOST_WIDE_INT);
static void arc_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void arc_file_start (void);
static void arc_internal_label (FILE *, const char *, unsigned long);
static void arc_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
					tree, int *, int);
static bool arc_rtx_costs (rtx, int, int, int *);
static int arc_address_cost (rtx);
static void arc_external_libcall (rtx);
static bool arc_return_in_memory (tree, tree);

/* Initialize the GCC target structure.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arc_assemble_integer

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arc_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arc_output_function_epilogue
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START arc_file_start
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arc_attribute_table
#undef TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL arc_internal_label
#undef TARGET_ASM_EXTERNAL_LIBCALL
#define TARGET_ASM_EXTERNAL_LIBCALL arc_external_libcall

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS arc_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST arc_address_cost

#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY arc_return_in_memory

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS arc_setup_incoming_varargs

struct gcc_target targetm = TARGET_INITIALIZER;

/* Called by OVERRIDE_OPTIONS to initialize various things.  */

void
arc_init (void)
{
  char *tmp;

  if (arc_cpu_string == 0
      || !strcmp (arc_cpu_string, "base"))
    {
      /* Ensure we have a printable value for the .cpu pseudo-op.  */
      arc_cpu_string = "base";
      arc_mangle_cpu = NULL;
    }
  else if (ARC_EXTENSION_CPU (arc_cpu_string))
    ; /* nothing to do */
  else
    {
      error ("bad value (%s) for -mcpu switch", arc_cpu_string);
      arc_cpu_string = "base";
      arc_mangle_cpu = NULL;
    }

  /* Set the pseudo-ops for the various standard sections.  */
  arc_text_section = tmp = xmalloc (strlen (arc_text_string) + sizeof (ARC_SECTION_FORMAT) + 1);
  sprintf (tmp, ARC_SECTION_FORMAT, arc_text_string);
  arc_data_section = tmp = xmalloc (strlen (arc_data_string) + sizeof (ARC_SECTION_FORMAT) + 1);
  sprintf (tmp, ARC_SECTION_FORMAT, arc_data_string);
  arc_rodata_section = tmp = xmalloc (strlen (arc_rodata_string) + sizeof (ARC_SECTION_FORMAT) + 1);
  sprintf (tmp, ARC_SECTION_FORMAT, arc_rodata_string);

  arc_init_reg_tables ();

  /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P.  */
  memset (arc_punct_chars, 0, sizeof (arc_punct_chars));
  arc_punct_chars['#'] = 1;
  arc_punct_chars['*'] = 1;
  arc_punct_chars['?'] = 1;
  arc_punct_chars['!'] = 1;
  arc_punct_chars['~'] = 1;
}

/* The condition codes of the ARC, and the inverse function.  */
static const char *const arc_condition_codes[] =
{
  "al", 0, "eq", "ne", "p", "n", "c", "nc", "v", "nv",
  "gt", "le", "ge", "lt", "hi", "ls", "pnz", 0
};

#define ARC_INVERSE_CONDITION_CODE(X)  ((X) ^ 1)

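/* Note (added; not part of the original source): the table above pairs each
   condition with its logical inverse at an even/odd index, which is what
   makes the XOR in ARC_INVERSE_CONDITION_CODE work.  For example, using the
   indices implied by the table:

     "eq" = 2   ->  ARC_INVERSE_CONDITION_CODE (2)  = 3  = "ne"
     "c"  = 6   ->  ARC_INVERSE_CONDITION_CODE (6)  = 7  = "nc"
     "gt" = 10  ->  ARC_INVERSE_CONDITION_CODE (10) = 11 = "le"  */
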
/* Returns the index of the ARC condition code string in
   `arc_condition_codes'.  COMPARISON should be an rtx like
   `(eq (...) (...))'.  */

static int
get_arc_condition_code (rtx comparison)
{
  switch (GET_CODE (comparison))
    {
    case EQ : return 2;
    case NE : return 3;
    case GT : return 10;
    case LE : return 11;
    case GE : return 12;
    case LT : return 13;
    case GTU : return 14;
    case LEU : return 15;
    case LTU : return 6;
    case GEU : return 7;
    default : abort ();
    }
  /*NOTREACHED*/
  return (42);
}

/* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
   return the mode to be used for the comparison.  */

enum machine_mode
arc_select_cc_mode (enum rtx_code op,
		    rtx x ATTRIBUTE_UNUSED,
		    rtx y ATTRIBUTE_UNUSED)
{
  switch (op)
    {
    case EQ :
    case NE :
      return CCZNmode;
    default :
      switch (GET_CODE (x))
	{
	case AND :
	case IOR :
	case XOR :
	case SIGN_EXTEND :
	case ZERO_EXTEND :
	  return CCZNmode;
	case ASHIFT :
	case ASHIFTRT :
	case LSHIFTRT :
	  return CCZNCmode;
	default :
	  break;
	}
    }
  return CCmode;
}

/* Vectors to keep interesting information about registers where it can easily
   be got.  We used to use the actual mode value as the bit number, but there
   is (or may be) more than 32 modes now.  Instead we use two tables: one
   indexed by hard register number, and one indexed by mode.  */

/* The purpose of arc_mode_class is to shrink the range of modes so that
   they all fit (as bit numbers) in a 32 bit word (again).  Each real mode is
   mapped into one arc_mode_class mode.  */

enum arc_mode_class {
  C_MODE,
  S_MODE, D_MODE, T_MODE, O_MODE,
  SF_MODE, DF_MODE, TF_MODE, OF_MODE
};

/* Modes for condition codes.  */
#define C_MODES (1 << (int) C_MODE)

/* Modes for single-word and smaller quantities.  */
#define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))

/* Modes for double-word and smaller quantities.  */
#define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))

/* Modes for quad-word and smaller quantities.  */
#define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))

/* Value is 1 if register/mode pair is acceptable on arc.  */

const unsigned int arc_hard_regno_mode_ok[] = {
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, D_MODES,
  D_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,

  /* ??? Leave these as S_MODES for now.  */
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, C_MODES
};

unsigned int arc_mode_class[NUM_MACHINE_MODES];

enum reg_class arc_regno_reg_class[FIRST_PSEUDO_REGISTER];

static void
arc_init_reg_tables (void)
{
  int i;

  for (i = 0; i < NUM_MACHINE_MODES; i++)
    {
      switch (GET_MODE_CLASS (i))
	{
	case MODE_INT :
	case MODE_PARTIAL_INT :
	case MODE_COMPLEX_INT :
	  if (GET_MODE_SIZE (i) <= 4)
	    arc_mode_class[i] = 1 << (int) S_MODE;
	  else if (GET_MODE_SIZE (i) == 8)
	    arc_mode_class[i] = 1 << (int) D_MODE;
	  else if (GET_MODE_SIZE (i) == 16)
	    arc_mode_class[i] = 1 << (int) T_MODE;
	  else if (GET_MODE_SIZE (i) == 32)
	    arc_mode_class[i] = 1 << (int) O_MODE;
	  else
	    arc_mode_class[i] = 0;
	  break;
	case MODE_FLOAT :
	case MODE_COMPLEX_FLOAT :
	  if (GET_MODE_SIZE (i) <= 4)
	    arc_mode_class[i] = 1 << (int) SF_MODE;
	  else if (GET_MODE_SIZE (i) == 8)
	    arc_mode_class[i] = 1 << (int) DF_MODE;
	  else if (GET_MODE_SIZE (i) == 16)
	    arc_mode_class[i] = 1 << (int) TF_MODE;
	  else if (GET_MODE_SIZE (i) == 32)
	    arc_mode_class[i] = 1 << (int) OF_MODE;
	  else
	    arc_mode_class[i] = 0;
	  break;
	case MODE_CC :
	  arc_mode_class[i] = 1 << (int) C_MODE;
	  break;
	default :
	  arc_mode_class[i] = 0;
	  break;
	}
    }

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (i < 60)
	arc_regno_reg_class[i] = GENERAL_REGS;
      else if (i == 60)
	arc_regno_reg_class[i] = LPCOUNT_REG;
      else if (i == 61)
	arc_regno_reg_class[i] = NO_REGS /* CC_REG: must be NO_REGS */;
      else
	arc_regno_reg_class[i] = NO_REGS;
    }
}

/* ARC specific attribute support.

   The ARC has these attributes:
   interrupt - for interrupt functions.  */

const struct attribute_spec arc_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt", 1, 1, true, false, false, arc_handle_interrupt_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};

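/* Illustrative usage (added; not part of the original source): a handler
   would be declared in user code roughly as

     void my_handler (void) __attribute__ ((interrupt ("ilink1")));

   where "my_handler" is a made-up name; the single argument must be the
   string "ilink1" or "ilink2", as checked by the handler below.  */
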
/* Handle an "interrupt" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
arc_handle_interrupt_attribute (tree *node ATTRIBUTE_UNUSED,
				tree name,
				tree args,
				int flags ATTRIBUTE_UNUSED,
				bool *no_add_attrs)
{
  tree value = TREE_VALUE (args);

  if (TREE_CODE (value) != STRING_CST)
    {
      warning ("argument of `%s' attribute is not a string constant",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else if (strcmp (TREE_STRING_POINTER (value), "ilink1")
	   && strcmp (TREE_STRING_POINTER (value), "ilink2"))
    {
      warning ("argument of `%s' attribute is not \"ilink1\" or \"ilink2\"",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Acceptable arguments to the call insn.  */

int
call_address_operand (rtx op, enum machine_mode mode)
{
  return (symbolic_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && LEGITIMATE_CONSTANT_P (op))
	  || (GET_CODE (op) == REG));
}

int
call_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  return call_address_operand (op, mode);
}

/* Returns 1 if OP is a symbol reference.  */

int
symbolic_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF :
    case LABEL_REF :
    case CONST :
      return 1;
    default :
      return 0;
    }
}

/* Return truth value of statement that OP is a symbolic memory
   operand of mode MODE.  */

int
symbolic_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST
	  || GET_CODE (op) == LABEL_REF);
}

/* Return true if OP is a short immediate (shimm) value.  */

int
short_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;
  return SMALL_INT (INTVAL (op));
}

/* Return true if OP will require a long immediate (limm) value.
   This is currently only used when calculating length attributes.  */

int
long_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF :
    case LABEL_REF :
    case CONST :
      return 1;
    case CONST_INT :
      return !SMALL_INT (INTVAL (op));
    case CONST_DOUBLE :
      /* These can happen because large unsigned 32 bit constants are
	 represented this way (the multiplication patterns can cause these
	 to be generated).  They also occur for SFmode values.  */
      return 1;
    default :
      break;
    }
  return 0;
}

/* Return true if OP is a MEM that when used as a load or store address will
   require an 8 byte insn.
   Load and store instructions don't allow the same possibilities but they're
   similar enough that this one function will do.
   This is currently only used when calculating length attributes.  */

int
long_immediate_loadstore_operand (rtx op,
				  enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case SYMBOL_REF :
    case LABEL_REF :
    case CONST :
      return 1;
    case CONST_INT :
      /* This must be handled as "st c,[limm]".  Ditto for load.
	 Technically, the assembler could translate some possibilities to
	 "st c,[limm/2 + limm/2]" if limm/2 will fit in a shimm, but we don't
	 assume that it does.  */
      return 1;
    case CONST_DOUBLE :
      /* These can happen because large unsigned 32 bit constants are
	 represented this way (the multiplication patterns can cause these
	 to be generated).  They also occur for SFmode values.  */
      return 1;
    case PLUS :
      if (GET_CODE (XEXP (op, 1)) == CONST_INT
	  && !SMALL_INT (INTVAL (XEXP (op, 1))))
	return 1;
      return 0;
    default :
      break;
    }
  return 0;
}

/* Return true if OP is an acceptable argument for a single word
   move source.  */

int
move_src_operand (rtx op, enum machine_mode mode)
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF :
    case LABEL_REF :
    case CONST :
      return 1;
    case CONST_INT :
      return (LARGE_INT (INTVAL (op)));
    case CONST_DOUBLE :
      /* We can handle DImode integer constants in SImode if the value
	 (signed or unsigned) will fit in 32 bits.  This is needed because
	 large unsigned 32 bit constants are represented as CONST_DOUBLEs.  */
      if (mode == SImode)
	return arc_double_limm_p (op);
      /* We can handle 32 bit floating point constants.  */
      return GET_MODE (op) == SFmode;
    case REG :
      return register_operand (op, mode);
    case SUBREG :
      /* (subreg (mem ...) ...) can occur here if the inner part was once a
	 pseudo-reg and is now a stack slot.  */
      if (GET_CODE (SUBREG_REG (op)) == MEM)
	return address_operand (XEXP (SUBREG_REG (op), 0), mode);
      else
	return register_operand (op, mode);
    case MEM :
      return address_operand (XEXP (op, 0), mode);
    default :
      return 0;
    }
}

/* Return true if OP is an acceptable argument for a double word
   move source.  */

int
move_double_src_operand (rtx op, enum machine_mode mode)
{
  switch (GET_CODE (op))
    {
    case REG :
      return register_operand (op, mode);
    case SUBREG :
      /* (subreg (mem ...) ...) can occur here if the inner part was once a
	 pseudo-reg and is now a stack slot.  */
      if (GET_CODE (SUBREG_REG (op)) == MEM)
	return move_double_src_operand (SUBREG_REG (op), mode);
      else
	return register_operand (op, mode);
    case MEM :
      /* Disallow auto inc/dec for now.  */
      if (GET_CODE (XEXP (op, 0)) == PRE_DEC
	  || GET_CODE (XEXP (op, 0)) == PRE_INC)
	return 0;
      return address_operand (XEXP (op, 0), mode);
    default :
      return 0;
    }
}

/* Return true if OP is an acceptable argument for a move destination.  */

int
move_dest_operand (rtx op, enum machine_mode mode)
{
  switch (GET_CODE (op))
    {
    case REG :
      return register_operand (op, mode);
    case SUBREG :
      /* (subreg (mem ...) ...) can occur here if the inner part was once a
	 pseudo-reg and is now a stack slot.  */
      if (GET_CODE (SUBREG_REG (op)) == MEM)
	return address_operand (XEXP (SUBREG_REG (op), 0), mode);
      else
	return register_operand (op, mode);
    case MEM :
      return address_operand (XEXP (op, 0), mode);
    default :
      return 0;
    }
}

/* Return true if OP is valid load with update operand.  */

int
load_update_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != MEM
      || GET_MODE (op) != mode)
    return 0;
  op = XEXP (op, 0);
  if (GET_CODE (op) != PLUS
      || GET_MODE (op) != Pmode
      || !register_operand (XEXP (op, 0), Pmode)
      || !nonmemory_operand (XEXP (op, 1), Pmode))
    return 0;
  return 1;
}

/* Return true if OP is valid store with update operand.  */

int
store_update_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != MEM
      || GET_MODE (op) != mode)
    return 0;
  op = XEXP (op, 0);
  if (GET_CODE (op) != PLUS
      || GET_MODE (op) != Pmode
      || !register_operand (XEXP (op, 0), Pmode)
      || !(GET_CODE (XEXP (op, 1)) == CONST_INT
	   && SMALL_INT (INTVAL (XEXP (op, 1)))))
    return 0;
  return 1;
}

/* Return true if OP is a non-volatile non-immediate operand.
   Volatile memory refs require a special "cache-bypass" instruction
   and only the standard movXX patterns are set up to handle them.  */

int
nonvol_nonimm_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
    return 0;
  return nonimmediate_operand (op, mode);
}

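/* Illustration (added; not part of the original source): volatile memory
   references are printed with the ".di" cache-bypass suffix by the %V
   operand code in arc_print_operand, e.g. roughly

	ld.di r0,[r1]		; volatile load, bypasses the data cache
	st.di r0,[r1]		; volatile store

   so this predicate keeps volatile MEMs out of the ordinary patterns and
   leaves them to the movXX patterns that emit the suffix.  The register
   names are made up for the example.  */
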
/* Accept integer operands in the range -0x80000000..0x7fffffff.  We have
   to check the range carefully since this predicate is used in DImode
   contexts.  */

int
const_sint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* All allowed constants will fit a CONST_INT.  */
  return (GET_CODE (op) == CONST_INT
	  && (INTVAL (op) >= (-0x7fffffff - 1) && INTVAL (op) <= 0x7fffffff));
}

/* Accept integer operands in the range 0..0xffffffff.  We have to check the
   range carefully since this predicate is used in DImode contexts.  Also, we
   need some extra crud to make it work when hosted on 64-bit machines.  */

int
const_uint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
#if HOST_BITS_PER_WIDE_INT > 32
  /* All allowed constants will fit a CONST_INT.  */
  return (GET_CODE (op) == CONST_INT
	  && (INTVAL (op) >= 0 && INTVAL (op) <= 0xffffffffL));
#else
  return ((GET_CODE (op) == CONST_INT && INTVAL (op) >= 0)
	  || (GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_HIGH (op) == 0));
#endif
}

/* Return 1 if OP is a comparison operator valid for the mode of CC.
   This allows the use of MATCH_OPERATOR to recognize all the branch insns.

   Some insns only set a few bits in the condition code.  So only allow those
   comparisons that use the bits that are valid.  */

int
proper_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code;

  if (!COMPARISON_P (op))
    return 0;

  code = GET_CODE (op);
  if (GET_MODE (XEXP (op, 0)) == CCZNmode)
    return (code == EQ || code == NE);
  if (GET_MODE (XEXP (op, 0)) == CCZNCmode)
    return (code == EQ || code == NE
	    || code == LTU || code == GEU || code == GTU || code == LEU);
  return 1;
}

/* Misc. utilities.  */

/* X and Y are two things to compare using CODE.  Emit the compare insn and
   return the rtx for the cc reg in the proper mode.  */

rtx
gen_compare_reg (enum rtx_code code, rtx x, rtx y)
{
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
  rtx cc_reg;

  cc_reg = gen_rtx_REG (mode, 61);

  emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
			  gen_rtx_COMPARE (mode, x, y)));

  return cc_reg;
}

/* Return 1 if VALUE, a const_double, will fit in a limm (4 byte number).
   We assume the value can be either signed or unsigned.  */

int
arc_double_limm_p (rtx value)
{
  HOST_WIDE_INT low, high;

  if (GET_CODE (value) != CONST_DOUBLE)
    return 0;

  low = CONST_DOUBLE_LOW (value);
  high = CONST_DOUBLE_HIGH (value);

  if (low & 0x80000000)
    {
      return (((unsigned HOST_WIDE_INT) low <= 0xffffffff && high == 0)
	      || (((low & - (unsigned HOST_WIDE_INT) 0x80000000)
		   == - (unsigned HOST_WIDE_INT) 0x80000000)
		  && high == -1));
    }

  return (unsigned HOST_WIDE_INT) low <= 0x7fffffff && high == 0;
}

/* Do any needed setup for a variadic function.  For the ARC, we must
   create a register parameter block, and then copy any anonymous arguments
   in registers to memory.

   CUM has not been updated for the last named argument which has type TYPE
   and mode MODE, and we rely on this fact.

   We do things a little weird here.  We're supposed to only allocate space
   for the anonymous arguments.  However we need to keep the stack eight byte
   aligned.  So we round the space up if necessary, and leave it to va_start
   to compensate.  */

static void
arc_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
			    enum machine_mode mode,
			    tree type ATTRIBUTE_UNUSED,
			    int *pretend_size,
			    int no_rtl)
{
  int first_anon_arg;

  /* All BLKmode values are passed by reference.  */
  if (mode == BLKmode)
    abort ();

  first_anon_arg = *cum + ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1)
			   / UNITS_PER_WORD);

  if (first_anon_arg < MAX_ARC_PARM_REGS && !no_rtl)
    {
      /* Note that first_reg_offset < MAX_ARC_PARM_REGS.  */
      int first_reg_offset = first_anon_arg;
      /* Size in words to "pretend" allocate.  */
      int size = MAX_ARC_PARM_REGS - first_reg_offset;
      /* Extra slop to keep stack eight byte aligned.  */
      int align_slop = size & 1;
      rtx regblock;

      regblock = gen_rtx_MEM (BLKmode,
			      plus_constant (arg_pointer_rtx,
					     FIRST_PARM_OFFSET (0)
					     + align_slop * UNITS_PER_WORD));
      set_mem_alias_set (regblock, get_varargs_alias_set ());
      set_mem_align (regblock, BITS_PER_WORD);
      move_block_from_reg (first_reg_offset, regblock,
			   MAX_ARC_PARM_REGS - first_reg_offset);

      *pretend_size = ((MAX_ARC_PARM_REGS - first_reg_offset + align_slop)
		       * UNITS_PER_WORD);
    }
}

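/* Worked example (added for illustration; not part of the original source):
   if the named arguments consume r0-r2, first_anon_arg is 3 and, assuming
   MAX_ARC_PARM_REGS is 8, size is 5 words, so align_slop is 1.  Six words
   (24 bytes) are then "pretend" allocated to keep the stack eight byte
   aligned, the anonymous registers are stored one word past
   FIRST_PARM_OFFSET (0), and arc_va_start below compensates for that extra
   word when the named argument count is odd.  */
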
/* Cost functions.  */

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
arc_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
{
  switch (code)
    {
      /* Small integers are as cheap as registers.  4 byte values can
	 be fetched as immediate constants - let's give that the cost
	 of an extra insn.  */
    case CONST_INT :
      if (SMALL_INT (INTVAL (x)))
	*total = 0;
      else
	*total = COSTS_N_INSNS (1);
      return true;

    case CONST :
    case LABEL_REF :
    case SYMBOL_REF :
      *total = COSTS_N_INSNS (1);
      return true;

    case CONST_DOUBLE :
      {
	rtx high, low;
	split_double (x, &high, &low);
	*total = COSTS_N_INSNS (!SMALL_INT (INTVAL (high))
				+ !SMALL_INT (INTVAL (low)));
	return true;
      }

    /* Encourage synth_mult to find a synthetic multiply when reasonable.
       If we need more than 12 insns to do a multiply, then go out-of-line,
       since the call overhead will be < 10% of the cost of the multiply.  */
    case ASHIFT :
    case ASHIFTRT :
    case LSHIFTRT :
      if (TARGET_SHIFTER)
	*total = COSTS_N_INSNS (1);
      else if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	*total = COSTS_N_INSNS (16);
      else
	*total = COSTS_N_INSNS (INTVAL (XEXP ((x), 1)));
      return true;

    default :
      return false;
    }
}

/* Provide the costs of an addressing mode that contains ADDR.
   If ADDR is not a valid address, its cost is irrelevant.  */

static int
arc_address_cost (rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG :
      return 1;

    case LABEL_REF :
    case SYMBOL_REF :
    case CONST :
      return 2;

    case PLUS :
      {
	register rtx plus0 = XEXP (addr, 0);
	register rtx plus1 = XEXP (addr, 1);

	if (GET_CODE (plus0) != REG)
	  break;

	switch (GET_CODE (plus1))
	  {
	  case CONST_INT :
	    return SMALL_INT (plus1) ? 1 : 2;
	  case CONST :
	  case SYMBOL_REF :
	  case LABEL_REF :
	    return 2;
	  default :
	    break;
	  }
	break;
      }
    default :
      break;
    }

  return 4;
}

/* Function prologue/epilogue handlers.  */

/* ARC stack frames look like:

	     Before call                       After call
	+-----------------------+	+-----------------------+
	|			|	|			|
   high	|  local variables,	|	|  local variables,	|
   mem	|  reg save area, etc.	|	|  reg save area, etc.	|
	|			|	|			|
	+-----------------------+	+-----------------------+
	|			|	|			|
	|  arguments on stack.	|	|  arguments on stack.	|
	|			|	|			|
 SP+16->+-----------------------+FP+48->+-----------------------+
	| 4 word save area for	|	|  reg parm save area,	|
	| return addr, prev %fp	|	|  only created for	|
  SP+0->+-----------------------+	|  variable argument	|
					|  functions		|
				 FP+16->+-----------------------+
					| 4 word save area for	|
					| return addr, prev %fp	|
				  FP+0->+-----------------------+
					|  local variables	|
					+-----------------------+
					|  register save area	|
					+-----------------------+
					|  alloca allocations	|
					+-----------------------+
					|  arguments on stack	|
				 SP+16->+-----------------------+
   low					| 4 word save area for	|
   memory				| return addr, prev %fp	|
				  SP+0->+-----------------------+

Notes:
1) The "reg parm save area" does not exist for non variable argument fns.
   The "reg parm save area" can be eliminated completely if we created our
   own va-arc.h, but that has tradeoffs as well (so it's not done).  */

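/* Worked example (added for illustration; not part of the original source):
   for a non-variadic, non-interrupt function with 20 bytes of locals and no
   outgoing stack arguments, arc_compute_frame_size below would see
   var_size = 20, args_size = 0, pretend_size = 0 and extra_size =
   FIRST_PARM_OFFSET (0) (the 4 word fp/blink save area shown above, i.e.
   16 bytes), giving total_size = 36 before ARC_STACK_ALIGN rounds it up to
   the eight byte stack alignment, i.e. 40 bytes.  The exact constants come
   from arc.h and are assumed here.  */
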
/* Structure to be filled in by arc_compute_frame_size with register
   save masks, and offsets for the current function.  */
struct arc_frame_info
{
  unsigned int total_size;	/* # bytes that the entire frame takes up.  */
  unsigned int extra_size;	/* # bytes of extra stuff.  */
  unsigned int pretend_size;	/* # bytes we push and pretend caller did.  */
  unsigned int args_size;	/* # bytes that outgoing arguments take up.  */
  unsigned int reg_size;	/* # bytes needed to store regs.  */
  unsigned int var_size;	/* # bytes that variables take up.  */
  unsigned int reg_offset;	/* Offset from new sp to store regs.  */
  unsigned int gmask;		/* Mask of saved gp registers.  */
  int initialized;		/* Nonzero if frame size already calculated.  */
};

/* Current frame information calculated by arc_compute_frame_size.  */
static struct arc_frame_info current_frame_info;

/* Zero structure to initialize current_frame_info.  */
static struct arc_frame_info zero_frame_info;

/* Type of function DECL.

   The result is cached.  To reset the cache at the end of a function,
   call with DECL = NULL_TREE.  */

enum arc_function_type
arc_compute_function_type (tree decl)
{
  tree a;
  /* Cached value.  */
  static enum arc_function_type fn_type = ARC_FUNCTION_UNKNOWN;
  /* Last function we were called for.  */
  static tree last_fn = NULL_TREE;

  /* Resetting the cached value?  */
  if (decl == NULL_TREE)
    {
      fn_type = ARC_FUNCTION_UNKNOWN;
      last_fn = NULL_TREE;
      return fn_type;
    }

  if (decl == last_fn && fn_type != ARC_FUNCTION_UNKNOWN)
    return fn_type;

  /* Assume we have a normal function (not an interrupt handler).  */
  fn_type = ARC_FUNCTION_NORMAL;

  /* Now see if this is an interrupt handler.  */
  for (a = DECL_ATTRIBUTES (current_function_decl);
       a;
       a = TREE_CHAIN (a))
    {
      tree name = TREE_PURPOSE (a), args = TREE_VALUE (a);

      if (name == get_identifier ("__interrupt__")
	  && list_length (args) == 1
	  && TREE_CODE (TREE_VALUE (args)) == STRING_CST)
	{
	  tree value = TREE_VALUE (args);

	  if (!strcmp (TREE_STRING_POINTER (value), "ilink1"))
	    fn_type = ARC_FUNCTION_ILINK1;
	  else if (!strcmp (TREE_STRING_POINTER (value), "ilink2"))
	    fn_type = ARC_FUNCTION_ILINK2;
	  break;
	}
    }

  last_fn = decl;
  return fn_type;
}

#define ILINK1_REGNUM 29
#define ILINK2_REGNUM 30
#define RETURN_ADDR_REGNUM 31
#define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
#define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))

/* Tell prologue and epilogue if register REGNO should be saved / restored.
   The return address and frame pointer are treated separately.
   Don't consider them here.  */
#define MUST_SAVE_REGISTER(regno, interrupt_p) \
((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
 && (regs_ever_live[regno] && (!call_used_regs[regno] || interrupt_p)))

#define MUST_SAVE_RETURN_ADDR (regs_ever_live[RETURN_ADDR_REGNUM])

/* Return the bytes needed to compute the frame pointer from the current
   stack pointer.

   SIZE is the size needed for local variables.  */

unsigned int
arc_compute_frame_size (int size /* # of var. bytes allocated.  */)
{
  int regno;
  unsigned int total_size, var_size, args_size, pretend_size, extra_size;
  unsigned int reg_size, reg_offset;
  unsigned int gmask;
  enum arc_function_type fn_type;
  int interrupt_p;

  var_size	= size;
  args_size	= current_function_outgoing_args_size;
  pretend_size	= current_function_pretend_args_size;
  extra_size	= FIRST_PARM_OFFSET (0);
  total_size	= extra_size + pretend_size + args_size + var_size;
  reg_offset	= FIRST_PARM_OFFSET(0) + current_function_outgoing_args_size;
  reg_size	= 0;
  gmask		= 0;

  /* See if this is an interrupt handler.  Call used registers must be saved
     for them too.  */
  fn_type = arc_compute_function_type (current_function_decl);
  interrupt_p = ARC_INTERRUPT_P (fn_type);

  /* Calculate space needed for registers.
     ??? We ignore the extension registers for now.  */

  for (regno = 0; regno <= 31; regno++)
    {
      if (MUST_SAVE_REGISTER (regno, interrupt_p))
	{
	  reg_size += UNITS_PER_WORD;
	  gmask |= 1 << regno;
	}
    }

  total_size += reg_size;

  /* If the only space to allocate is the fp/blink save area this is an
     empty frame.  However, if we'll be making a function call we need to
     allocate a stack frame for our callee's fp/blink save area.  */
  if (total_size == extra_size
      && !MUST_SAVE_RETURN_ADDR)
    total_size = extra_size = 0;

  total_size = ARC_STACK_ALIGN (total_size);

  /* Save computed information.  */
  current_frame_info.total_size   = total_size;
  current_frame_info.extra_size   = extra_size;
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.var_size     = var_size;
  current_frame_info.args_size    = args_size;
  current_frame_info.reg_size     = reg_size;
  current_frame_info.reg_offset   = reg_offset;
  current_frame_info.gmask        = gmask;
  current_frame_info.initialized  = reload_completed;

  /* Ok, we're done.  */
  return total_size;
}

/* Common code to save/restore registers.  */

void
arc_save_restore (FILE *file,
		  const char *base_reg,
		  unsigned int offset,
		  unsigned int gmask,
		  const char *op)
{
  int regno;

  if (gmask == 0)
    return;

  for (regno = 0; regno <= 31; regno++)
    {
      if ((gmask & (1L << regno)) != 0)
	{
	  fprintf (file, "\t%s %s,[%s,%d]\n",
		   op, reg_names[regno], base_reg, offset);
	  offset += UNITS_PER_WORD;
	}
    }
}

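/* Illustration (added; not part of the original source): for a prologue
   whose gmask has bits 4 and 5 set, base register "sp", a starting offset
   of 8 and OP "st", the loop above would emit roughly

	st r4,[sp,8]
	st r5,[sp,12]

   advancing the offset by UNITS_PER_WORD for each saved register.  The
   register numbers and offsets are made up for the example.  */
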
/* Target hook to assemble an integer object.  The ARC version needs to
   emit a special directive for references to labels and function
   symbols.  */

static bool
arc_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
  if (size == UNITS_PER_WORD && aligned_p
      && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
	  || GET_CODE (x) == LABEL_REF))
    {
      fputs ("\t.word\t%st(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")\n", asm_out_file);
      return true;
    }
  return default_assemble_integer (x, size, aligned_p);
}

/* Set up the stack and frame pointer (if desired) for the function.  */

static void
arc_output_function_prologue (FILE *file, HOST_WIDE_INT size)
{
  const char *sp_str = reg_names[STACK_POINTER_REGNUM];
  const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
  unsigned int gmask = current_frame_info.gmask;
  enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);

  /* If this is an interrupt handler, set up our stack frame.
     ??? Optimize later.  */
  if (ARC_INTERRUPT_P (fn_type))
    {
      fprintf (file, "\t%s interrupt handler\n",
	       ASM_COMMENT_START);
      fprintf (file, "\tsub %s,%s,16\n", sp_str, sp_str);
    }

  /* This is only for the human reader.  */
  fprintf (file, "\t%s BEGIN PROLOGUE %s vars= %d, regs= %d, args= %d, extra= %d\n",
	   ASM_COMMENT_START, ASM_COMMENT_START,
	   current_frame_info.var_size,
	   current_frame_info.reg_size / 4,
	   current_frame_info.args_size,
	   current_frame_info.extra_size);

  size = ARC_STACK_ALIGN (size);
  size = (! current_frame_info.initialized
	  ? arc_compute_frame_size (size)
	  : current_frame_info.total_size);

  /* These cases shouldn't happen.  Catch them now.  */
  if (size == 0 && gmask)
    abort ();

  /* Allocate space for register arguments if this is a variadic function.  */
  if (current_frame_info.pretend_size != 0)
    fprintf (file, "\tsub %s,%s,%d\n",
	     sp_str, sp_str, current_frame_info.pretend_size);

  /* The home-grown ABI says link register is saved first.  */
  if (MUST_SAVE_RETURN_ADDR)
    fprintf (file, "\tst %s,[%s,%d]\n",
	     reg_names[RETURN_ADDR_REGNUM], sp_str, UNITS_PER_WORD);

  /* Set up the previous frame pointer next (if we need to).  */
  if (frame_pointer_needed)
    {
      fprintf (file, "\tst %s,[%s]\n", fp_str, sp_str);
      fprintf (file, "\tmov %s,%s\n", fp_str, sp_str);
    }

  /* ??? We don't handle the case where the saved regs are more than 252
     bytes away from sp.  This can be handled by decrementing sp once, saving
     the regs, and then decrementing it again.  The epilogue doesn't have this
     problem as the `ld' insn takes reg+limm values (though it would be more
     efficient to avoid reg+limm).  */

  /* Allocate the stack frame.  */
  if (size - current_frame_info.pretend_size > 0)
    fprintf (file, "\tsub %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
	     sp_str, sp_str, size - current_frame_info.pretend_size);

  /* Save any needed call-saved regs (and call-used if this is an
     interrupt handler).  */
  arc_save_restore (file, sp_str, current_frame_info.reg_offset,
		    /* The zeroing of these two bits is unnecessary,
		       but leave this in for clarity.  */
		    gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
		    "st");

  fprintf (file, "\t%s END PROLOGUE\n", ASM_COMMENT_START);
}

/* Do any necessary cleanup after a function to restore stack, frame,
   and regs.  */

static void
arc_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
{
  rtx epilogue_delay = current_function_epilogue_delay_list;
  int noepilogue = FALSE;
  enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);

  /* This is only for the human reader.  */
  fprintf (file, "\t%s EPILOGUE\n", ASM_COMMENT_START);

  size = ARC_STACK_ALIGN (size);
  size = (!current_frame_info.initialized
	  ? arc_compute_frame_size (size)
	  : current_frame_info.total_size);

  if (size == 0 && epilogue_delay == 0)
    {
      rtx insn = get_last_insn ();

      /* If the last insn was a BARRIER, we don't have to write any code
	 because a jump (aka return) was put there.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn && GET_CODE (insn) == BARRIER)
	noepilogue = TRUE;
    }

  if (!noepilogue)
    {
      unsigned int pretend_size = current_frame_info.pretend_size;
      unsigned int frame_size = size - pretend_size;
      int restored, fp_restored_p;
      int can_trust_sp_p = !current_function_calls_alloca;
      const char *sp_str = reg_names[STACK_POINTER_REGNUM];
      const char *fp_str = reg_names[FRAME_POINTER_REGNUM];

      /* ??? There are lots of optimizations that can be done here.
	 EG: Use fp to restore regs if it's closer.
	 Maybe in time we'll do them all.  For now, always restore regs from
	 sp, but don't restore sp if we don't have to.  */

      if (!can_trust_sp_p)
	{
	  if (!frame_pointer_needed)
	    abort ();
	  fprintf (file,"\tsub %s,%s,%d\t\t%s sp not trusted here\n",
		   sp_str, fp_str, frame_size, ASM_COMMENT_START);
	}

      /* Restore any saved registers.  */
      arc_save_restore (file, sp_str, current_frame_info.reg_offset,
			/* The zeroing of these two bits is unnecessary,
			   but leave this in for clarity.  */
			current_frame_info.gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
			"ld");

      if (MUST_SAVE_RETURN_ADDR)
	fprintf (file, "\tld %s,[%s,%d]\n",
		 reg_names[RETURN_ADDR_REGNUM],
		 frame_pointer_needed ? fp_str : sp_str,
		 UNITS_PER_WORD + (frame_pointer_needed ? 0 : frame_size));

      /* Keep track of how much of the stack pointer we've restored.
	 It makes the following a lot more readable.  */
      restored = 0;
      fp_restored_p = 0;

      /* We try to emit the epilogue delay slot insn right after the load
	 of the return address register so that it can execute with the
	 stack intact.  Secondly, loads are delayed.  */
      /* ??? If stack intactness is important, always emit now.  */
      if (MUST_SAVE_RETURN_ADDR && epilogue_delay != NULL_RTX)
	{
	  final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1, NULL);
	  epilogue_delay = NULL_RTX;
	}

      if (frame_pointer_needed)
	{
	  /* Try to restore the frame pointer in the delay slot.  We can't,
	     however, if any of these is true.  */
	  if (epilogue_delay != NULL_RTX
	      || !SMALL_INT (frame_size)
	      || ARC_INTERRUPT_P (fn_type))
	    {
	      /* Note that we restore fp and sp here!  */
	      fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
	      restored += frame_size;
	      fp_restored_p = 1;
	    }
	}
      else if (!SMALL_INT (size /* frame_size + pretend_size */)
	       || ARC_INTERRUPT_P (fn_type))
	{
	  fprintf (file, "\tadd %s,%s,%d\n", sp_str, sp_str, frame_size);
	  restored += frame_size;
	}

      /* These must be done before the return insn because the delay slot
	 does the final stack restore.  */
      if (ARC_INTERRUPT_P (fn_type))
	{
	  if (epilogue_delay)
	    final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1,
			     NULL);
	}

      /* Emit the return instruction.  */
      {
	static const int regs[4] = {
	  0, RETURN_ADDR_REGNUM, ILINK1_REGNUM, ILINK2_REGNUM
	};
	fprintf (file, "\tj.d %s\n", reg_names[regs[fn_type]]);
      }

      /* If the only register saved is the return address, we need a
	 nop, unless we have an instruction to put into it.  Otherwise
	 we don't since reloading multiple registers doesn't reference
	 the register being loaded.  */

      if (ARC_INTERRUPT_P (fn_type))
	fprintf (file, "\tadd %s,%s,16\n", sp_str, sp_str);
      else if (epilogue_delay != NULL_RTX)
	{
	  if (frame_pointer_needed && !fp_restored_p)
	    abort ();
	  if (restored < size)
	    abort ();
	  final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1, NULL);
	}
      else if (frame_pointer_needed && !fp_restored_p)
	{
	  if (!SMALL_INT (frame_size))
	    abort ();
	  /* Note that we restore fp and sp here!  */
	  fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
	}
      else if (restored < size)
	{
	  if (!SMALL_INT (size - restored))
	    abort ();
	  fprintf (file, "\tadd %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
		   sp_str, sp_str, size - restored);
	}
      else
	fprintf (file, "\tnop\n");
    }

  /* Reset state info for each function.  */
  current_frame_info = zero_frame_info;
  arc_compute_function_type (NULL_TREE);
}

/* Define the number of delay slots needed for the function epilogue.

   Interrupt handlers can't have any epilogue delay slots (it's always needed
   for something else, I think).  For normal functions, we have to worry about
   using call-saved regs as they'll be restored before the delay slot insn.
   Functions with non-empty frames already have enough choices for the epilogue
   delay slot so for now we only consider functions with empty frames.  */

int
arc_delay_slots_for_epilogue (void)
{
  if (arc_compute_function_type (current_function_decl) != ARC_FUNCTION_NORMAL)
    return 0;
  if (!current_frame_info.initialized)
    (void) arc_compute_frame_size (get_frame_size ());
  if (current_frame_info.total_size == 0)
    return 1;
  return 0;
}

/* Return true if TRIAL is a valid insn for the epilogue delay slot.
   Any single length instruction which doesn't reference the stack or frame
   pointer or any call-saved register is OK.  SLOT will always be 0.  */

int
arc_eligible_for_epilogue_delay (rtx trial, int slot)
{
  if (slot != 0)
    abort ();

  if (get_attr_length (trial) == 1
      /* If registers where saved, presumably there's more than enough
	 possibilities for the delay slot.  The alternative is something
	 more complicated (of course, if we expanded the epilogue as rtl
	 this problem would go away).  */
      /* ??? Note that this will always be true since only functions with
	 empty frames have epilogue delay slots.  See
	 arc_delay_slots_for_epilogue.  */
      && current_frame_info.gmask == 0
      && ! reg_mentioned_p (stack_pointer_rtx, PATTERN (trial))
      && ! reg_mentioned_p (frame_pointer_rtx, PATTERN (trial)))
    return 1;
  return 0;
}

/* Emit special PIC prologues and epilogues.  */

void
arc_finalize_pic (void)
{
}

/* Return true if OP is a shift operator.  */

int
shift_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  switch (GET_CODE (op))
    {
    case ASHIFT :
    case ASHIFTRT :
    case LSHIFTRT :
      return 1;
    default :
      return 0;
    }
}

/* Output the assembler code for doing a shift.
   We go to a bit of trouble to generate efficient code as the ARC only has
   single bit shifts.  This is taken from the h8300 port.  We only have one
   mode of shifting and can't access individual bytes like the h8300 can, so
   this is greatly simplified (at the expense of not generating hyper-
   efficient code).

   This function is not used if the variable shift insns are present.  */

/* ??? We assume the output operand is the same as operand 1.
   This can be optimized (deleted) in the case of 1 bit shifts.  */
/* ??? We use the loop register here.  We don't use it elsewhere (yet) and
   using it here will give us a chance to play with it.  */

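/* Illustrative output (added; not part of the original source): for
   (ashift:SI (reg) (const_int 2)) the constant-count path below just repeats
   the single bit shift, emitting roughly

	asl r0,r0
	asl r0,r0

   while larger counts fall back to the lp_count / explicit loop sequences
   emitted further down.  The register name is made up for the example.  */
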
const char *
output_shift (rtx *operands)
{
  rtx shift = operands[3];
  enum machine_mode mode = GET_MODE (shift);
  enum rtx_code code = GET_CODE (shift);
  const char *shift_one;

  switch (code)
    {
    case ASHIFT : shift_one = "asl %0,%0"; break;
    case ASHIFTRT : shift_one = "asr %0,%0"; break;
    case LSHIFTRT : shift_one = "lsr %0,%0"; break;
    default : abort ();
    }

  if (GET_CODE (operands[2]) != CONST_INT)
    {
      if (optimize)
	output_asm_insn ("mov lp_count,%2", operands);
      else
	output_asm_insn ("mov %4,%2", operands);
      goto shiftloop;
    }
  else
    {
      int n = INTVAL (operands[2]);

      /* If the count is negative, make it 0.  */
      if (n < 0)
	n = 0;
      /* If the count is too big, truncate it.
	 ANSI says shifts of GET_MODE_BITSIZE are undefined - we choose to
	 do the intuitive thing.  */
      else if (n > GET_MODE_BITSIZE (mode))
	n = GET_MODE_BITSIZE (mode);

      /* First see if we can do them inline.  */
      if (n <= 8)
	{
	  while (--n >= 0)
	    output_asm_insn (shift_one, operands);
	}
      /* See if we can use a rotate/and.  */
      else if (n == BITS_PER_WORD - 1)
	{
	  switch (code)
	    {
	    case ASHIFT :
	      output_asm_insn ("and %0,%0,1\n\tror %0,%0", operands);
	      break;
	    case ASHIFTRT :
	      /* The ARC doesn't have a rol insn.  Use something else.  */
	      output_asm_insn ("asl.f 0,%0\n\tsbc %0,0,0", operands);
	      break;
	    case LSHIFTRT :
	      /* The ARC doesn't have a rol insn.  Use something else.  */
	      output_asm_insn ("asl.f 0,%0\n\tadc %0,0,0", operands);
	      break;
	    default :
	      break;
	    }
	}
      /* Must loop.  */
      else
	{
	  char buf[100];

	  if (optimize)
	    output_asm_insn ("mov lp_count,%c2", operands);
	  else
	    output_asm_insn ("mov %4,%c2", operands);

	shiftloop:
	  if (optimize)
	    {
	      if (flag_pic)
		sprintf (buf, "lr %%4,[status]\n\tadd %%4,%%4,6\t%s single insn loop start",
			 ASM_COMMENT_START);
	      else
		sprintf (buf, "mov %%4,%%%%st(1f)\t%s (single insn loop start) >> 2",
			 ASM_COMMENT_START);
	      output_asm_insn (buf, operands);
	      output_asm_insn ("sr %4,[lp_start]", operands);
	      output_asm_insn ("add %4,%4,1", operands);
	      output_asm_insn ("sr %4,[lp_end]", operands);
	      output_asm_insn ("nop\n\tnop", operands);
	      if (flag_pic)
		fprintf (asm_out_file, "\t%s single insn loop\n",
			 ASM_COMMENT_START);
	      else
		fprintf (asm_out_file, "1:\t%s single insn loop\n",
			 ASM_COMMENT_START);
	      output_asm_insn (shift_one, operands);
	    }
	  else
	    {
	      fprintf (asm_out_file, "1:\t%s begin shift loop\n",
		       ASM_COMMENT_START);
	      output_asm_insn ("sub.f %4,%4,1", operands);
	      output_asm_insn ("nop", operands);
	      output_asm_insn ("bn.nd 2f", operands);
	      output_asm_insn (shift_one, operands);
	      output_asm_insn ("b.nd 1b", operands);
	      fprintf (asm_out_file, "2:\t%s end shift loop\n",
		       ASM_COMMENT_START);
	    }
	}
    }

  return "";
}

/* Nested function support.  */

/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.  */

void
arc_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
			   rtx fnaddr ATTRIBUTE_UNUSED,
			   rtx cxt ATTRIBUTE_UNUSED)
{
}

/* Set the cpu type and print out other fancy things,
   at the top of the file.  */

static void
arc_file_start (void)
{
  default_file_start ();
  fprintf (asm_out_file, "\t.cpu %s\n", arc_cpu_string);
}

/* Print operand X (an rtx) in assembler syntax to file FILE.
   CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
   For `%' followed by punctuation, CODE is the punctuation and X is null.  */

void
arc_print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case '#' :
      /* Conditional branches.  For now these are equivalent.  */
    case '*' :
      /* Unconditional branches.  Output the appropriate delay slot suffix.  */
      if (!final_sequence || XVECLEN (final_sequence, 0) == 1)
	{
	  /* There's nothing in the delay slot.  */
	  fputs (".nd", file);
	}
      else
	{
	  rtx jump = XVECEXP (final_sequence, 0, 0);
	  rtx delay = XVECEXP (final_sequence, 0, 1);
	  if (INSN_ANNULLED_BRANCH_P (jump))
	    fputs (INSN_FROM_TARGET_P (delay) ? ".jd" : ".nd", file);
	  else
	    fputs (".d", file);
	}
      return;
    case '?' : /* with leading "." */
    case '!' : /* without leading "." */
      /* This insn can be conditionally executed.  See if the ccfsm machinery
	 says it should be conditionalized.  */
      if (arc_ccfsm_state == 3 || arc_ccfsm_state == 4)
	{
	  /* Is this insn in a delay slot?  */
	  if (final_sequence && XVECLEN (final_sequence, 0) == 2)
	    {
	      rtx insn = XVECEXP (final_sequence, 0, 1);

	      /* If the insn is annulled and is from the target path, we need
		 to inverse the condition test.  */
	      if (INSN_ANNULLED_BRANCH_P (insn))
		{
		  if (INSN_FROM_TARGET_P (insn))
		    fprintf (file, "%s%s",
			     code == '?' ? "." : "",
			     arc_condition_codes[ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc)]);
		  else
		    fprintf (file, "%s%s",
			     code == '?' ? "." : "",
			     arc_condition_codes[arc_ccfsm_current_cc]);
		}
	      else
		{
		  /* This insn is executed for either path, so don't
		     conditionalize it at all.  */
		  ; /* nothing to do */
		}
	    }
	  else
	    {
	      /* This insn isn't in a delay slot.  */
	      fprintf (file, "%s%s",
		       code == '?' ? "." : "",
		       arc_condition_codes[arc_ccfsm_current_cc]);
	    }
	}
      return;
    case '~' :
      /* Output a nop if we're between a set of the condition codes,
	 and a conditional branch.  */
      if (last_insn_set_cc_p)
	fputs ("nop\n\t", file);
      return;
    case 'd' :
      fputs (arc_condition_codes[get_arc_condition_code (x)], file);
      return;
    case 'D' :
      fputs (arc_condition_codes[ARC_INVERSE_CONDITION_CODE
				 (get_arc_condition_code (x))],
	     file);
      return;
    case 'R' :
      /* Write second word of DImode or DFmode reference,
	 register or memory.  */
      if (GET_CODE (x) == REG)
	fputs (reg_names[REGNO (x)+1], file);
      else if (GET_CODE (x) == MEM)
	{
	  fputc ('[', file);
	  /* Handle possible auto-increment.  Since it is pre-increment and
	     we have already done it, we can just use an offset of four.  */
	  /* ??? This is taken from rs6000.c I think.  I don't think it is
	     currently necessary, but keep it around.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
	  else
	    output_address (plus_constant (XEXP (x, 0), 4));
	  fputc (']', file);
	}
      else
	output_operand_lossage ("invalid operand to %%R code");
      return;
    case 'S' :
      if ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
	  || GET_CODE (x) == LABEL_REF)
	{
	  fprintf (file, "%%st(");
	  output_addr_const (file, x);
	  fprintf (file, ")");
	  return;
	}
      break;
    case 'H' :
    case 'L' :
      if (GET_CODE (x) == REG)
	{
	  /* L = least significant word, H = most significant word */
	  if ((TARGET_BIG_ENDIAN != 0) ^ (code == 'L'))
	    fputs (reg_names[REGNO (x)], file);
	  else
	    fputs (reg_names[REGNO (x)+1], file);
	}
      else if (GET_CODE (x) == CONST_INT
	       || GET_CODE (x) == CONST_DOUBLE)
	{
	  rtx first, second;

	  split_double (x, &first, &second);
	  fprintf (file, "0x%08lx",
		   (long)(code == 'L' ? INTVAL (first) : INTVAL (second)));
	}
      else
	output_operand_lossage ("invalid operand to %%H/%%L code");
      return;
    case 'A' :
      {
	char str[30];

	if (GET_CODE (x) != CONST_DOUBLE
	    || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
	  abort ();
	real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
	fprintf (file, "%s", str);
	return;
      }
    case 'U' :
      /* Output a load/store with update indicator if appropriate.  */
      if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    fputs (".a", file);
	}
      else
	output_operand_lossage ("invalid operand to %%U code");
      return;
    case 'V' :
      /* Output cache bypass indicator for a load/store insn.  Volatile memory
	 refs are defined to use the cache bypass mechanism.  */
      if (GET_CODE (x) == MEM)
	{
	  if (MEM_VOLATILE_P (x))
	    fputs (".di", file);
	}
      else
	output_operand_lossage ("invalid operand to %%V code");
      return;
    case 0 :
      /* Do nothing special.  */
      break;
    default :
      /* Unknown flag.  */
      output_operand_lossage ("invalid operand output code");
    }

  switch (GET_CODE (x))
    {
    case REG :
      fputs (reg_names[REGNO (x)], file);
      break;
    case MEM :
      fputc ('[', file);
      if (GET_CODE (XEXP (x, 0)) == PRE_INC)
	output_address (plus_constant (XEXP (XEXP (x, 0), 0),
				       GET_MODE_SIZE (GET_MODE (x))));
      else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
	output_address (plus_constant (XEXP (XEXP (x, 0), 0),
				       - GET_MODE_SIZE (GET_MODE (x))));
      else
	output_address (XEXP (x, 0));
      fputc (']', file);
      break;
    case CONST_DOUBLE :
      /* We handle SFmode constants here as output_addr_const doesn't.  */
      if (GET_MODE (x) == SFmode)
	{
	  REAL_VALUE_TYPE d;
	  long l;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
	  REAL_VALUE_TO_TARGET_SINGLE (d, l);
	  fprintf (file, "0x%08lx", l);
	  break;
	}
      /* Fall through.  Let output_addr_const deal with it.  */
    default :
      output_addr_const (file, x);
      break;
    }
}

/* Print a memory address as an operand to reference that memory location.  */

void
arc_print_operand_address (FILE *file, rtx addr)
{
  register rtx base, index = 0;
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case REG :
      fputs (reg_names[REGNO (addr)], file);
      break;
    case SYMBOL_REF :
      if (/*???*/ 0 && SYMBOL_REF_FUNCTION_P (addr))
	{
	  fprintf (file, "%%st(");
	  output_addr_const (file, addr);
	  fprintf (file, ")");
	}
      else
	output_addr_const (file, addr);
      break;
    case PLUS :
      if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
	offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
      else if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
	offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
      else
	base = XEXP (addr, 0), index = XEXP (addr, 1);
      if (GET_CODE (base) != REG)
	abort ();
      fputs (reg_names[REGNO (base)], file);
      if (index == 0)
	{
	  if (offset != 0)
	    fprintf (file, ",%d", offset);
	}
      else if (GET_CODE (index) == REG)
	fprintf (file, ",%s", reg_names[REGNO (index)]);
      else if (GET_CODE (index) == SYMBOL_REF)
	fputc (',', file), output_addr_const (file, index);
      else
	abort ();
      break;
    case PRE_INC :
    case PRE_DEC :
      /* We shouldn't get here as we've lost the mode of the memory object
	 (which says how much to inc/dec by).  */
      abort ();
      break;
    default :
      output_addr_const (file, addr);
      break;
    }
}

/* Update compare/branch separation marker.  */

static void
record_cc_ref (rtx insn)
{
  last_insn_set_cc_p = current_insn_set_cc_p;

  switch (get_attr_cond (insn))
    {
    case COND_SET :
    case COND_SET_ZN :
    case COND_SET_ZNC :
      if (get_attr_length (insn) == 1)
	current_insn_set_cc_p = 1;
      else
	current_insn_set_cc_p = 0;
      break;
    default :
      current_insn_set_cc_p = 0;
      break;
    }
}

/* Conditional execution support.

   This is based on the ARM port but for now is much simpler.

   A finite state machine takes care of noticing whether or not instructions
   can be conditionally executed, and thus decrease execution time and code
   size by deleting branch instructions.  The fsm is controlled by
   final_prescan_insn, and controls the actions of PRINT_OPERAND.  The patterns
   in the .md file for the branch insns also have a hand in this.  */

/* The state of the fsm controlling condition codes are:
   0: normal, do nothing special
   1: don't output this insn
   2: don't output this insn
   3: make insns conditional
   4: make insns conditional

   State transitions (state->state by whom, under what condition):
   0 -> 1 final_prescan_insn, if insn is conditional branch
   0 -> 2 final_prescan_insn, if the `target' is an unconditional branch
   1 -> 3 branch patterns, after having not output the conditional branch
   2 -> 4 branch patterns, after having not output the conditional branch
   3 -> 0 (*targetm.asm_out.internal_label), if the `target' label is reached
	  (the target label has CODE_LABEL_NUMBER equal to
	  arc_ccfsm_target_label).
   4 -> 0 final_prescan_insn, if `target' unconditional branch is reached

   If the jump clobbers the conditions then we use states 2 and 4.

   A similar thing can be done with conditional return insns.

   We also handle separating branches from sets of the condition code.
   This is done here because knowledge of the ccfsm state is required,
   we may not be outputting the branch.  */

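/* Illustrative example (added; not part of the original source): the fsm
   lets a short forward branch such as

	bne .L1			; conditional branch around one insn
	sub r0,r0,1
   .L1:

   be emitted instead as a single conditionalized instruction,

	sub.eq r0,r0,1

   using the "." plus condition-code suffix produced by the %? operand code
   in arc_print_operand.  The register name and label are made up for the
   example.  */
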
void
arc_final_prescan_insn (rtx insn,
			rtx *opvec ATTRIBUTE_UNUSED,
			int noperands ATTRIBUTE_UNUSED)
{
  /* BODY will hold the body of INSN.  */
  register rtx body = PATTERN (insn);

  /* This will be 1 if trying to repeat the trick (ie: do the `else' part of
     an if/then/else), and things need to be reversed.  */
  int reverse = 0;

  /* If we start with a return insn, we only succeed if we find another one.  */
  int seeking_return = 0;

  /* START_INSN will hold the insn from where we start looking.  This is the
     first insn after the following code_label if REVERSE is true.  */
  rtx start_insn = insn;

  /* Update compare/branch separation marker.  */
  record_cc_ref (insn);

  /* Allow -mdebug-ccfsm to turn this off so we can see how well it does.
     We can't do this in macro FINAL_PRESCAN_INSN because its called from
     final_scan_insn which has `optimize' as a local.  */
  if (optimize < 2 || TARGET_NO_COND_EXEC)
    return;

  /* If in state 4, check if the target branch is reached, in order to
     change back to state 0.  */
  if (arc_ccfsm_state == 4)
    {
      if (insn == arc_ccfsm_target_insn)
	{
	  arc_ccfsm_target_insn = NULL;
	  arc_ccfsm_state = 0;
	}
      return;
    }

  /* If in state 3, it is possible to repeat the trick, if this insn is an
     unconditional branch to a label, and immediately following this branch
     is the previous target label which is only used once, and the label this
     branch jumps to is not too far off.  Or in other words "we've done the
     `then' part, see if we can do the `else' part."  */
  if (arc_ccfsm_state == 3)
    {
      if (simplejump_p (insn))
	{
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    {
	      /* ??? Isn't this always a barrier?  */
	      start_insn = next_nonnote_insn (start_insn);
	    }
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    reverse = TRUE;
	  else
	    return;
	}
      else if (GET_CODE (body) == RETURN)
	{
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    {
	      reverse = TRUE;
	      seeking_return = 1;
	    }
	  else
	    return;
	}
      else
	return;
    }

  if (GET_CODE (insn) != JUMP_INSN)
    return;

  /* This jump might be paralleled with a clobber of the condition codes,
     the jump should always come first.  */
  if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
    body = XVECEXP (body, 0, 0);

  if (reverse
      || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
	  && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
    {
      int insns_skipped = 0, fail = FALSE, succeed = FALSE;
      /* Flag which part of the IF_THEN_ELSE is the LABEL_REF.  */
      int then_not_else = TRUE;
      /* Nonzero if next insn must be the target label.  */
      int next_must_be_target_label_p;
      rtx this_insn = start_insn, label = 0;

      /* Register the insn jumped to.  */
      if (reverse)
	{
	  if (!seeking_return)
	    label = XEXP (SET_SRC (body), 0);
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
	label = XEXP (XEXP (SET_SRC (body), 1), 0);
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
	{
	  label = XEXP (XEXP (SET_SRC (body), 2), 0);
	  then_not_else = FALSE;
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
	seeking_return = 1;
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
	{
	  seeking_return = 1;
	  then_not_else = FALSE;
	}
      else
	abort ();

      /* See how many insns this branch skips, and what kind of insns.  If all
	 insns are okay, and the label or unconditional branch to the same
	 label is not too far away, succeed.  */
      for (insns_skipped = 0, next_must_be_target_label_p = FALSE;
	   !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
	   insns_skipped++)
	{
	  rtx scanbody;

	  this_insn = next_nonnote_insn (this_insn);
	  if (!this_insn)
	    break;

	  if (next_must_be_target_label_p)
	    {
	      if (GET_CODE (this_insn) == BARRIER)
		continue;
	      if (GET_CODE (this_insn) == CODE_LABEL
		  && this_insn == label)
		{
		  arc_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;
	    }

	  scanbody = PATTERN (this_insn);

	  switch (GET_CODE (this_insn))
	    {
	    case CODE_LABEL :
	      /* Succeed if it is the target label, otherwise fail since
		 control falls in from somewhere else.  */
	      if (this_insn == label)
		{
		  arc_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;

	    case BARRIER :
	      /* Succeed if the following insn is the target label.
		 Otherwise fail.
		 If return insns are used then the last insn in a function
		 will be a barrier.  */
	      next_must_be_target_label_p = TRUE;
	      break;

	    case CALL_INSN :
	      /* Can handle a call insn if there are no insns after it.
		 IE: The next "insn" is the target label.  We don't have to
		 worry about delay slots as such insns are SEQUENCE's inside
		 INSN's.  ??? It is possible to handle such insns though.  */
	      if (get_attr_cond (this_insn) == COND_CANUSE)
		next_must_be_target_label_p = TRUE;
	      else
		fail = TRUE;
	      break;

	    case JUMP_INSN :
	      /* If this is an unconditional branch to the same label, succeed.
		 If it is to another label, do nothing.  If it is conditional,
		 fail.  */
	      /* ??? Probably, the test for the SET and the PC are unnecessary.  */

	      if (GET_CODE (scanbody) == SET
		  && GET_CODE (SET_DEST (scanbody)) == PC)
		{
		  if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
		      && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
		    {
		      arc_ccfsm_state = 2;
		      succeed = TRUE;
		    }
		  else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
		    fail = TRUE;
		}
	      else if (GET_CODE (scanbody) == RETURN
		       && seeking_return)
		{
		  arc_ccfsm_state = 2;
		  succeed = TRUE;
		}
	      else if (GET_CODE (scanbody) == PARALLEL)
		{
		  if (get_attr_cond (this_insn) != COND_CANUSE)
		    fail = TRUE;
		}
	      break;

	    case INSN :
	      /* We can only do this with insns that can use the condition
		 codes (and don't set them).  */
	      if (GET_CODE (scanbody) == SET
		  || GET_CODE (scanbody) == PARALLEL)
		{
		  if (get_attr_cond (this_insn) != COND_CANUSE)
		    fail = TRUE;
		}
	      /* We can't handle other insns like sequences.  */
	      else
		fail = TRUE;
	      break;

	    default :
	      break;
	    }
	}

      if (succeed)
	{
	  if ((!seeking_return) && (arc_ccfsm_state == 1 || reverse))
	    arc_ccfsm_target_label = CODE_LABEL_NUMBER (label);
	  else if (seeking_return || arc_ccfsm_state == 2)
	    {
	      while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
		{
		  this_insn = next_nonnote_insn (this_insn);

		  if (this_insn && (GET_CODE (this_insn) == BARRIER
				    || GET_CODE (this_insn) == CODE_LABEL))
		    abort ();
		}
	      if (!this_insn)
		{
		  /* Oh dear! we ran off the end, give up.  */
		  extract_insn_cached (insn);
		  arc_ccfsm_state = 0;
		  arc_ccfsm_target_insn = NULL;
		  return;
		}
	      arc_ccfsm_target_insn = this_insn;
	    }
	  else
	    abort ();

	  /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
	     what it was.  */
	  if (!reverse)
	    arc_ccfsm_current_cc = get_arc_condition_code (XEXP (SET_SRC (body),
								 0));

	  if (reverse || then_not_else)
	    arc_ccfsm_current_cc = ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc);
	}

      /* Restore recog_data.  Getting the attributes of other insns can
	 destroy this array, but final.c assumes that it remains intact
	 across this call.  */
      extract_insn_cached (insn);
    }
}

/* Record that we are currently outputting label NUM with prefix PREFIX.
   If it's the label we're looking for, reset the ccfsm machinery.

   Called from (*targetm.asm_out.internal_label).  */

void
arc_ccfsm_at_label (const char *prefix, int num)
{
  if (arc_ccfsm_state == 3 && arc_ccfsm_target_label == num
      && !strcmp (prefix, "L"))
    {
      arc_ccfsm_state = 0;
      arc_ccfsm_target_insn = NULL_RTX;
    }
}

/* See if the current insn, which is a conditional branch, is to be
   deleted.  */

int
arc_ccfsm_branch_deleted_p (void)
{
  if (arc_ccfsm_state == 1 || arc_ccfsm_state == 2)
    return 1;
  return 0;
}

/* Record a branch isn't output because subsequent insns can be
   conditionalized.  */

void
arc_ccfsm_record_branch_deleted (void)
{
  /* Indicate we're conditionalizing insns now.  */
  arc_ccfsm_state += 2;

  /* If the next insn is a subroutine call, we still need a nop between the
     cc setter and user.  We need to undo the effect of calling record_cc_ref
     for the just deleted branch.  */
  current_insn_set_cc_p = last_insn_set_cc_p;
}

void
arc_va_start (tree valist, rtx nextarg)
{
  /* See arc_setup_incoming_varargs for reasons for this oddity.  */
  if (current_function_args_info < 8
      && (current_function_args_info & 1))
    nextarg = plus_constant (nextarg, UNITS_PER_WORD);

  std_expand_builtin_va_start (valist, nextarg);
}

rtx
arc_va_arg (tree valist, tree type)
{
  rtx addr_rtx;
  tree addr, incr;
  tree type_ptr = build_pointer_type (type);

  /* All aggregates are passed by reference.  All scalar types larger
     than 8 bytes are passed by reference.  */

  if (AGGREGATE_TYPE_P (type) || int_size_in_bytes (type) > 8)
    {
      tree type_ptr_ptr = build_pointer_type (type_ptr);

      addr = build (INDIRECT_REF, type_ptr,
		    build (NOP_EXPR, type_ptr_ptr, valist));

      incr = build (PLUS_EXPR, TREE_TYPE (valist),
		    valist, build_int_2 (UNITS_PER_WORD, 0));
    }
  else
    {
      HOST_WIDE_INT align, rounded_size;

      /* Compute the rounded size of the type.  */
      align = PARM_BOUNDARY / BITS_PER_UNIT;
      rounded_size = (((TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT
			+ align - 1) / align) * align);

      /* Align 8 byte operands.  */
      addr = valist;
      if (TYPE_ALIGN (type) > BITS_PER_WORD)
	{
	  /* AP = (TYPE *)(((int)AP + 7) & -8)  */

	  addr = build (NOP_EXPR, integer_type_node, valist);
	  addr = fold (build (PLUS_EXPR, integer_type_node, addr,
			      build_int_2 (7, 0)));
	  addr = fold (build (BIT_AND_EXPR, integer_type_node, addr,
			      build_int_2 (-8, 0)));
	  addr = fold (build (NOP_EXPR, TREE_TYPE (valist), addr));
	}

      /* The increment is always rounded_size past the aligned pointer.  */
      incr = fold (build (PLUS_EXPR, TREE_TYPE (addr), addr,
			  build_int_2 (rounded_size, 0)));

      /* Adjust the pointer in big-endian mode.  */
      if (BYTES_BIG_ENDIAN)
	{
	  HOST_WIDE_INT adj;
	  adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
	  if (rounded_size > align)
	    adj = rounded_size;

	  addr = fold (build (PLUS_EXPR, TREE_TYPE (addr), addr,
			      build_int_2 (rounded_size - adj, 0)));
	}
    }

  /* Evaluate the data address.  */
  addr_rtx = expand_expr (addr, NULL_RTX, Pmode, EXPAND_NORMAL);
  addr_rtx = copy_to_reg (addr_rtx);

  /* Compute new value for AP.  */
  incr = build (MODIFY_EXPR, TREE_TYPE (valist), valist, incr);
  TREE_SIDE_EFFECTS (incr) = 1;
  expand_expr (incr, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return addr_rtx;
}

/* This is how to output a definition of an internal numbered label where
   PREFIX is the class of label and NUM is the number within the class.  */

static void
arc_internal_label (FILE *stream, const char *prefix, unsigned long labelno)
{
  arc_ccfsm_at_label (prefix, labelno);
  default_internal_label (stream, prefix, labelno);
}

/* Worker function for TARGET_ASM_EXTERNAL_LIBCALL.  */

static void
arc_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#if 0
  /* On the ARC we want to have libgcc's for multiple cpus in one binary.
     We can't use `assemble_name' here as that will call ASM_OUTPUT_LABELREF
     and we'll get another suffix added on if -mmangle-cpu.  */
  if (TARGET_MANGLE_CPU_LIBGCC)
    {
      fprintf (FILE, "\t.rename\t_%s, _%s%s\n",
	       XSTR (SYMREF, 0), XSTR (SYMREF, 0),
	       arc_mangle_suffix);
    }
#endif
}

/* Worker function for TARGET_RETURN_IN_MEMORY.  */

static bool
arc_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
{
  if (AGGREGATE_TYPE_P (type))
    return true;
  else
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      return (size == -1 || size > 8);
    }
}