1 /* Subroutines used for code generation on the Argonaut ARC cpu.
2 Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2003,
3 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* ??? This is an old port, and is undoubtedly suffering from bit rot. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "real.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "function.h"
38 #include "expr.h"
39 #include "recog.h"
40 #include "toplev.h"
41 #include "tm_p.h"
42 #include "target.h"
43 #include "target-def.h"
45 /* Which cpu we're compiling for. */
46 int arc_cpu_type;
48 /* Name of mangle string to add to symbols to separate code compiled for each
49 cpu (or NULL). */
50 const char *arc_mangle_cpu;
52 /* Save the operands last given to a compare for use when we
53 generate a scc or bcc insn. */
54 rtx arc_compare_op0, arc_compare_op1;
56 /* Name of text, data, and rodata sections used in varasm.c. */
57 const char *arc_text_section;
58 const char *arc_data_section;
59 const char *arc_rodata_section;
61 /* Array of valid operand punctuation characters. */
62 char arc_punct_chars[256];
64 /* Variables used by arc_final_prescan_insn to implement conditional
65 execution. */
66 static int arc_ccfsm_state;
67 static int arc_ccfsm_current_cc;
68 static rtx arc_ccfsm_target_insn;
69 static int arc_ccfsm_target_label;
71 /* The maximum number of insns skipped which will be conditionalized if
72 possible. */
73 #define MAX_INSNS_SKIPPED 3
75 /* A nop is needed between a 4 byte insn that sets the condition codes and
76 a branch that uses them (the same isn't true for an 8 byte insn that sets
77 the condition codes). Set by arc_final_prescan_insn. Used by
78 arc_print_operand. */
79 static int last_insn_set_cc_p;
80 static int current_insn_set_cc_p;
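/* Editorial illustration (a hedged sketch, not from the original sources):
   when a short compare such as "sub.f 0,r0,r1" is immediately followed by a
   conditional branch, the `%~' operand code in arc_print_operand below uses
   these flags to emit a filler "nop" between the two; an 8 byte compare
   (one carrying a long immediate) needs no such nop.  */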
81 static bool arc_handle_option (size_t, const char *, int);
82 static void record_cc_ref (rtx);
83 static void arc_init_reg_tables (void);
84 static int get_arc_condition_code (rtx);
85 const struct attribute_spec arc_attribute_table[];
86 static tree arc_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
87 static bool arc_assemble_integer (rtx, unsigned int, int);
88 static void arc_output_function_prologue (FILE *, HOST_WIDE_INT);
89 static void arc_output_function_epilogue (FILE *, HOST_WIDE_INT);
90 static void arc_file_start (void);
91 static void arc_internal_label (FILE *, const char *, unsigned long);
92 static void arc_va_start (tree, rtx);
93 static void arc_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
94 tree, int *, int);
95 static bool arc_rtx_costs (rtx, int, int, int *);
96 static int arc_address_cost (rtx);
97 static void arc_external_libcall (rtx);
98 static bool arc_return_in_memory (const_tree, const_tree);
99 static bool arc_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
100 const_tree, bool);
102 /* Initialize the GCC target structure. */
103 #undef TARGET_ASM_ALIGNED_HI_OP
104 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
105 #undef TARGET_ASM_ALIGNED_SI_OP
106 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
107 #undef TARGET_ASM_INTEGER
108 #define TARGET_ASM_INTEGER arc_assemble_integer
110 #undef TARGET_ASM_FUNCTION_PROLOGUE
111 #define TARGET_ASM_FUNCTION_PROLOGUE arc_output_function_prologue
112 #undef TARGET_ASM_FUNCTION_EPILOGUE
113 #define TARGET_ASM_FUNCTION_EPILOGUE arc_output_function_epilogue
114 #undef TARGET_ASM_FILE_START
115 #define TARGET_ASM_FILE_START arc_file_start
116 #undef TARGET_ATTRIBUTE_TABLE
117 #define TARGET_ATTRIBUTE_TABLE arc_attribute_table
118 #undef TARGET_ASM_INTERNAL_LABEL
119 #define TARGET_ASM_INTERNAL_LABEL arc_internal_label
120 #undef TARGET_ASM_EXTERNAL_LIBCALL
121 #define TARGET_ASM_EXTERNAL_LIBCALL arc_external_libcall
123 #undef TARGET_HANDLE_OPTION
124 #define TARGET_HANDLE_OPTION arc_handle_option
126 #undef TARGET_RTX_COSTS
127 #define TARGET_RTX_COSTS arc_rtx_costs
128 #undef TARGET_ADDRESS_COST
129 #define TARGET_ADDRESS_COST arc_address_cost
131 #undef TARGET_PROMOTE_FUNCTION_ARGS
132 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
133 #undef TARGET_PROMOTE_FUNCTION_RETURN
134 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
135 #undef TARGET_PROMOTE_PROTOTYPES
136 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
138 #undef TARGET_RETURN_IN_MEMORY
139 #define TARGET_RETURN_IN_MEMORY arc_return_in_memory
140 #undef TARGET_PASS_BY_REFERENCE
141 #define TARGET_PASS_BY_REFERENCE arc_pass_by_reference
142 #undef TARGET_CALLEE_COPIES
143 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
145 #undef TARGET_SETUP_INCOMING_VARARGS
146 #define TARGET_SETUP_INCOMING_VARARGS arc_setup_incoming_varargs
148 #undef TARGET_EXPAND_BUILTIN_VA_START
149 #define TARGET_EXPAND_BUILTIN_VA_START arc_va_start
151 struct gcc_target targetm = TARGET_INITIALIZER;
153 /* Implement TARGET_HANDLE_OPTION. */
155 static bool
156 arc_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
158 switch (code)
160 case OPT_mcpu_:
161 return strcmp (arg, "base") == 0 || ARC_EXTENSION_CPU (arg);
163 default:
164 return true;
168 /* Called by OVERRIDE_OPTIONS to initialize various things. */
170 void
171 arc_init (void)
173 char *tmp;
175 /* Set the pseudo-ops for the various standard sections. */
176 arc_text_section = tmp = xmalloc (strlen (arc_text_string) + sizeof (ARC_SECTION_FORMAT) + 1);
177 sprintf (tmp, ARC_SECTION_FORMAT, arc_text_string);
178 arc_data_section = tmp = xmalloc (strlen (arc_data_string) + sizeof (ARC_SECTION_FORMAT) + 1);
179 sprintf (tmp, ARC_SECTION_FORMAT, arc_data_string);
180 arc_rodata_section = tmp = xmalloc (strlen (arc_rodata_string) + sizeof (ARC_SECTION_FORMAT) + 1);
181 sprintf (tmp, ARC_SECTION_FORMAT, arc_rodata_string);
183 arc_init_reg_tables ();
185 /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P. */
186 memset (arc_punct_chars, 0, sizeof (arc_punct_chars));
187 arc_punct_chars['#'] = 1;
188 arc_punct_chars['*'] = 1;
189 arc_punct_chars['?'] = 1;
190 arc_punct_chars['!'] = 1;
191 arc_punct_chars['~'] = 1;
194 /* The condition codes of the ARC, and the inverse function. */
195 static const char *const arc_condition_codes[] =
197 "al", 0, "eq", "ne", "p", "n", "c", "nc", "v", "nv",
198 "gt", "le", "ge", "lt", "hi", "ls", "pnz", 0
201 #define ARC_INVERSE_CONDITION_CODE(X) ((X) ^ 1)
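/* Worked example (editorial note, derived from the table above): EQ maps to
   index 2 ("eq") and ARC_INVERSE_CONDITION_CODE (2) == 3, i.e. "ne";
   likewise GT is index 10 ("gt") and inverts to 11 ("le").  The table is
   laid out so that each even/odd pair of entries are inverses, which is why
   XOR with 1 is all the macro needs.  */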
203 /* Returns the index of the ARC condition code string in
204 `arc_condition_codes'. COMPARISON should be an rtx like
205 `(eq (...) (...))'. */
207 static int
208 get_arc_condition_code (rtx comparison)
210 switch (GET_CODE (comparison))
212 case EQ : return 2;
213 case NE : return 3;
214 case GT : return 10;
215 case LE : return 11;
216 case GE : return 12;
217 case LT : return 13;
218 case GTU : return 14;
219 case LEU : return 15;
220 case LTU : return 6;
221 case GEU : return 7;
222 default : gcc_unreachable ();
224 /*NOTREACHED*/
225 return (42);
228 /* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
229 return the mode to be used for the comparison. */
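/* For instance (editorial summary of the cases below): an EQ/NE test, or any
   comparison whose first operand is an AND/IOR/XOR or an extension, only
   needs the Z and N flags and so gets CCZNmode; comparing the result of a
   shift also needs the carry bit, hence CCZNCmode; everything else falls
   back to plain CCmode.  */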
231 enum machine_mode
232 arc_select_cc_mode (enum rtx_code op,
233 rtx x ATTRIBUTE_UNUSED,
234 rtx y ATTRIBUTE_UNUSED)
236 switch (op)
238 case EQ :
239 case NE :
240 return CCZNmode;
241 default :
242 switch (GET_CODE (x))
244 case AND :
245 case IOR :
246 case XOR :
247 case SIGN_EXTEND :
248 case ZERO_EXTEND :
249 return CCZNmode;
250 case ASHIFT :
251 case ASHIFTRT :
252 case LSHIFTRT :
253 return CCZNCmode;
254 default:
255 break;
258 return CCmode;
261 /* Vectors to keep interesting information about registers where it can easily
262 be obtained. We used to use the actual mode value as the bit number, but there
263 is (or may be) more than 32 modes now. Instead we use two tables: one
264 indexed by hard register number, and one indexed by mode. */
266 /* The purpose of arc_mode_class is to shrink the range of modes so that
267 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
268 mapped into one arc_mode_class mode. */
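/* For example (editorial note, mirroring arc_init_reg_tables below): QImode,
   HImode and SImode all map to S_MODE, DImode to D_MODE, SFmode to SF_MODE,
   DFmode to DF_MODE, and the CC modes to C_MODE, so each per-register mask
   stays within a 32-bit word.  */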
270 enum arc_mode_class {
271 C_MODE,
272 S_MODE, D_MODE, T_MODE, O_MODE,
273 SF_MODE, DF_MODE, TF_MODE, OF_MODE
276 /* Modes for condition codes. */
277 #define C_MODES (1 << (int) C_MODE)
279 /* Modes for single-word and smaller quantities. */
280 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
282 /* Modes for double-word and smaller quantities. */
283 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
285 /* Modes for quad-word and smaller quantities. */
286 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
288 /* Value is 1 if register/mode pair is acceptable on arc. */
290 const unsigned int arc_hard_regno_mode_ok[] = {
291 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
292 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
293 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, D_MODES,
294 D_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
296 /* ??? Leave these as S_MODES for now. */
297 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
298 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
299 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
300 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, C_MODES
303 unsigned int arc_mode_class [NUM_MACHINE_MODES];
305 enum reg_class arc_regno_reg_class[FIRST_PSEUDO_REGISTER];
307 static void
308 arc_init_reg_tables (void)
310 int i;
312 for (i = 0; i < NUM_MACHINE_MODES; i++)
314 switch (GET_MODE_CLASS (i))
316 case MODE_INT:
317 case MODE_PARTIAL_INT:
318 case MODE_COMPLEX_INT:
319 if (GET_MODE_SIZE (i) <= 4)
320 arc_mode_class[i] = 1 << (int) S_MODE;
321 else if (GET_MODE_SIZE (i) == 8)
322 arc_mode_class[i] = 1 << (int) D_MODE;
323 else if (GET_MODE_SIZE (i) == 16)
324 arc_mode_class[i] = 1 << (int) T_MODE;
325 else if (GET_MODE_SIZE (i) == 32)
326 arc_mode_class[i] = 1 << (int) O_MODE;
327 else
328 arc_mode_class[i] = 0;
329 break;
330 case MODE_FLOAT:
331 case MODE_COMPLEX_FLOAT:
332 if (GET_MODE_SIZE (i) <= 4)
333 arc_mode_class[i] = 1 << (int) SF_MODE;
334 else if (GET_MODE_SIZE (i) == 8)
335 arc_mode_class[i] = 1 << (int) DF_MODE;
336 else if (GET_MODE_SIZE (i) == 16)
337 arc_mode_class[i] = 1 << (int) TF_MODE;
338 else if (GET_MODE_SIZE (i) == 32)
339 arc_mode_class[i] = 1 << (int) OF_MODE;
340 else
341 arc_mode_class[i] = 0;
342 break;
343 case MODE_CC:
344 arc_mode_class[i] = 1 << (int) C_MODE;
345 break;
346 default:
347 arc_mode_class[i] = 0;
348 break;
352 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
354 if (i < 60)
355 arc_regno_reg_class[i] = GENERAL_REGS;
356 else if (i == 60)
357 arc_regno_reg_class[i] = LPCOUNT_REG;
358 else if (i == 61)
359 arc_regno_reg_class[i] = NO_REGS /* CC_REG: must be NO_REGS */;
360 else
361 arc_regno_reg_class[i] = NO_REGS;
365 /* ARC specific attribute support.
367 The ARC has these attributes:
368 interrupt - for interrupt functions
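/* Example usage (a sketch; per the handler below, the only accepted
   arguments are the strings "ilink1" and "ilink2"):

       void handler (void) __attribute__ ((interrupt ("ilink1")));
*/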
371 const struct attribute_spec arc_attribute_table[] =
373 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
374 { "interrupt", 1, 1, true, false, false, arc_handle_interrupt_attribute },
375 { NULL, 0, 0, false, false, false, NULL }
378 /* Handle an "interrupt" attribute; arguments as in
379 struct attribute_spec.handler. */
380 static tree
381 arc_handle_interrupt_attribute (tree *node ATTRIBUTE_UNUSED,
382 tree name,
383 tree args,
384 int flags ATTRIBUTE_UNUSED,
385 bool *no_add_attrs)
387 tree value = TREE_VALUE (args);
389 if (TREE_CODE (value) != STRING_CST)
391 warning (OPT_Wattributes,
392 "argument of %qs attribute is not a string constant",
393 IDENTIFIER_POINTER (name));
394 *no_add_attrs = true;
396 else if (strcmp (TREE_STRING_POINTER (value), "ilink1")
397 && strcmp (TREE_STRING_POINTER (value), "ilink2"))
399 warning (OPT_Wattributes,
400 "argument of %qs attribute is not \"ilink1\" or \"ilink2\"",
401 IDENTIFIER_POINTER (name));
402 *no_add_attrs = true;
405 return NULL_TREE;
409 /* Acceptable arguments to the call insn. */
412 call_address_operand (rtx op, enum machine_mode mode)
414 return (symbolic_operand (op, mode)
415 || (GET_CODE (op) == CONST_INT && LEGITIMATE_CONSTANT_P (op))
416 || (GET_CODE (op) == REG));
420 call_operand (rtx op, enum machine_mode mode)
422 if (GET_CODE (op) != MEM)
423 return 0;
424 op = XEXP (op, 0);
425 return call_address_operand (op, mode);
428 /* Returns 1 if OP is a symbol reference. */
431 symbolic_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
433 switch (GET_CODE (op))
435 case SYMBOL_REF:
436 case LABEL_REF:
437 case CONST :
438 return 1;
439 default:
440 return 0;
444 /* Return true if OP is a symbolic memory operand of mode MODE. */
448 symbolic_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
450 if (GET_CODE (op) == SUBREG)
451 op = SUBREG_REG (op);
452 if (GET_CODE (op) != MEM)
453 return 0;
454 op = XEXP (op, 0);
455 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST
456 || GET_CODE (op) == LABEL_REF);
459 /* Return true if OP is a short immediate (shimm) value. */
462 short_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
464 if (GET_CODE (op) != CONST_INT)
465 return 0;
466 return SMALL_INT (INTVAL (op));
469 /* Return true if OP will require a long immediate (limm) value.
470 This is currently only used when calculating length attributes. */
473 long_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
475 switch (GET_CODE (op))
477 case SYMBOL_REF :
478 case LABEL_REF :
479 case CONST :
480 return 1;
481 case CONST_INT :
482 return !SMALL_INT (INTVAL (op));
483 case CONST_DOUBLE :
484 /* These can happen because large unsigned 32-bit constants are
485 represented this way (the multiplication patterns can cause these
486 to be generated). They also occur for SFmode values. */
487 return 1;
488 default:
489 break;
491 return 0;
494 /* Return true if OP is a MEM that when used as a load or store address will
495 require an 8 byte insn.
496 Load and store instructions don't allow the same possibilities, but they're
497 similar enough that this one function will do.
498 This is currently only used when calculating length attributes. */
501 long_immediate_loadstore_operand (rtx op,
502 enum machine_mode mode ATTRIBUTE_UNUSED)
504 if (GET_CODE (op) != MEM)
505 return 0;
507 op = XEXP (op, 0);
508 switch (GET_CODE (op))
510 case SYMBOL_REF :
511 case LABEL_REF :
512 case CONST :
513 return 1;
514 case CONST_INT :
515 /* This must be handled as "st c,[limm]". Ditto for load.
516 Technically, the assembler could translate some possibilities to
517 "st c,[limm/2 + limm/2]" if limm/2 will fit in a shimm, but we don't
518 assume that it does. */
519 return 1;
520 case CONST_DOUBLE :
521 /* These can happen because large unsigned 32-bit constants are
522 represented this way (the multiplication patterns can cause these
523 to be generated). They also occur for SFmode values. */
524 return 1;
525 case REG :
526 return 0;
527 case PLUS :
528 if (GET_CODE (XEXP (op, 1)) == CONST_INT
529 && !SMALL_INT (INTVAL (XEXP (op, 1))))
530 return 1;
531 return 0;
532 default:
533 break;
535 return 0;
538 /* Return true if OP is an acceptable argument for a single word
539 move source. */
542 move_src_operand (rtx op, enum machine_mode mode)
544 switch (GET_CODE (op))
546 case SYMBOL_REF :
547 case LABEL_REF :
548 case CONST :
549 return 1;
550 case CONST_INT :
551 return (LARGE_INT (INTVAL (op)));
552 case CONST_DOUBLE :
553 /* We can handle DImode integer constants in SImode if the value
554 (signed or unsigned) will fit in 32 bits. This is needed because
555 large unsigned 32-bit constants are represented as CONST_DOUBLEs. */
556 if (mode == SImode)
557 return arc_double_limm_p (op);
558 /* We can handle 32-bit floating point constants. */
559 if (mode == SFmode)
560 return GET_MODE (op) == SFmode;
561 return 0;
562 case REG :
563 return register_operand (op, mode);
564 case SUBREG :
565 /* (subreg (mem ...) ...) can occur here if the inner part was once a
566 pseudo-reg and is now a stack slot. */
567 if (GET_CODE (SUBREG_REG (op)) == MEM)
568 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
569 else
570 return register_operand (op, mode);
571 case MEM :
572 return address_operand (XEXP (op, 0), mode);
573 default :
574 return 0;
578 /* Return true if OP is an acceptable argument for a double word
579 move source. */
582 move_double_src_operand (rtx op, enum machine_mode mode)
584 switch (GET_CODE (op))
586 case REG :
587 return register_operand (op, mode);
588 case SUBREG :
589 /* (subreg (mem ...) ...) can occur here if the inner part was once a
590 pseudo-reg and is now a stack slot. */
591 if (GET_CODE (SUBREG_REG (op)) == MEM)
592 return move_double_src_operand (SUBREG_REG (op), mode);
593 else
594 return register_operand (op, mode);
595 case MEM :
596 /* Disallow auto inc/dec for now. */
597 if (GET_CODE (XEXP (op, 0)) == PRE_DEC
598 || GET_CODE (XEXP (op, 0)) == PRE_INC)
599 return 0;
600 return address_operand (XEXP (op, 0), mode);
601 case CONST_INT :
602 case CONST_DOUBLE :
603 return 1;
604 default :
605 return 0;
609 /* Return true if OP is an acceptable argument for a move destination. */
612 move_dest_operand (rtx op, enum machine_mode mode)
614 switch (GET_CODE (op))
616 case REG :
617 return register_operand (op, mode);
618 case SUBREG :
619 /* (subreg (mem ...) ...) can occur here if the inner part was once a
620 pseudo-reg and is now a stack slot. */
621 if (GET_CODE (SUBREG_REG (op)) == MEM)
622 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
623 else
624 return register_operand (op, mode);
625 case MEM :
626 return address_operand (XEXP (op, 0), mode);
627 default :
628 return 0;
632 /* Return true if OP is a valid load-with-update operand. */
635 load_update_operand (rtx op, enum machine_mode mode)
637 if (GET_CODE (op) != MEM
638 || GET_MODE (op) != mode)
639 return 0;
640 op = XEXP (op, 0);
641 if (GET_CODE (op) != PLUS
642 || GET_MODE (op) != Pmode
643 || !register_operand (XEXP (op, 0), Pmode)
644 || !nonmemory_operand (XEXP (op, 1), Pmode))
645 return 0;
646 return 1;
649 /* Return true if OP is a valid store-with-update operand. */
652 store_update_operand (rtx op, enum machine_mode mode)
654 if (GET_CODE (op) != MEM
655 || GET_MODE (op) != mode)
656 return 0;
657 op = XEXP (op, 0);
658 if (GET_CODE (op) != PLUS
659 || GET_MODE (op) != Pmode
660 || !register_operand (XEXP (op, 0), Pmode)
661 || !(GET_CODE (XEXP (op, 1)) == CONST_INT
662 && SMALL_INT (INTVAL (XEXP (op, 1)))))
663 return 0;
664 return 1;
667 /* Return true if OP is a non-volatile non-immediate operand.
668 Volatile memory refs require a special "cache-bypass" instruction
669 and only the standard movXX patterns are set up to handle them. */
672 nonvol_nonimm_operand (rtx op, enum machine_mode mode)
674 if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
675 return 0;
676 return nonimmediate_operand (op, mode);
679 /* Accept integer operands in the range -0x80000000..0x7fffffff. We have
680 to check the range carefully since this predicate is used in DImode
681 contexts. */
684 const_sint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
686 /* All allowed constants will fit a CONST_INT. */
687 return (GET_CODE (op) == CONST_INT
688 && (INTVAL (op) >= (-0x7fffffff - 1) && INTVAL (op) <= 0x7fffffff));
691 /* Accept integer operands in the range 0..0xffffffff. We have to check the
692 range carefully since this predicate is used in DImode contexts. Also, we
693 need some extra crud to make it work when hosted on 64-bit machines. */
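/* For instance (editorial note): on a 64-bit host the value 0xffffffff is a
   plain non-negative CONST_INT and is accepted by the first branch below; on
   a 32-bit host the same value can only be represented as a CONST_DOUBLE
   with a zero high word, which is what the second branch accepts.  */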
696 const_uint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
698 #if HOST_BITS_PER_WIDE_INT > 32
699 /* All allowed constants will fit a CONST_INT. */
700 return (GET_CODE (op) == CONST_INT
701 && (INTVAL (op) >= 0 && INTVAL (op) <= 0xffffffffL));
702 #else
703 return ((GET_CODE (op) == CONST_INT && INTVAL (op) >= 0)
704 || (GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_HIGH (op) == 0));
705 #endif
708 /* Return 1 if OP is a comparison operator valid for the mode of CC.
709 This allows the use of MATCH_OPERATOR to recognize all the branch insns.
711 Some insns only set a few bits in the condition code. So only allow those
712 comparisons that use the bits that are valid. */
715 proper_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
717 enum rtx_code code;
718 if (!COMPARISON_P (op))
719 return 0;
721 code = GET_CODE (op);
722 if (GET_MODE (XEXP (op, 0)) == CCZNmode)
723 return (code == EQ || code == NE);
724 if (GET_MODE (XEXP (op, 0)) == CCZNCmode)
725 return (code == EQ || code == NE
726 || code == LTU || code == GEU || code == GTU || code == LEU);
727 return 1;
730 /* Misc. utilities. */
732 /* X and Y are two things to compare using CODE. Emit the compare insn and
733 return the rtx for the cc reg in the proper mode. */
736 gen_compare_reg (enum rtx_code code, rtx x, rtx y)
738 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
739 rtx cc_reg;
741 cc_reg = gen_rtx_REG (mode, 61);
743 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
744 gen_rtx_COMPARE (mode, x, y)));
746 return cc_reg;
749 /* Return 1 if VALUE, a const_double, will fit in a limm (4 byte number).
750 We assume the value can be either signed or unsigned. */
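/* Worked examples (editorial, derived from the checks below): low
   0xdeadbeef / high 0 fits, since it is a valid unsigned 32-bit value;
   low 0xfffffffc / high -1 fits, being the sign-extended form of -4;
   low 5 / high 1 does not fit, as it genuinely needs more than 32 bits.
   (Host-word-size subtleties aside.)  */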
753 arc_double_limm_p (rtx value)
755 HOST_WIDE_INT low, high;
757 gcc_assert (GET_CODE (value) == CONST_DOUBLE);
759 low = CONST_DOUBLE_LOW (value);
760 high = CONST_DOUBLE_HIGH (value);
762 if (low & 0x80000000)
764 return (((unsigned HOST_WIDE_INT) low <= 0xffffffff && high == 0)
765 || (((low & - (unsigned HOST_WIDE_INT) 0x80000000)
766 == - (unsigned HOST_WIDE_INT) 0x80000000)
767 && high == -1));
769 else
771 return (unsigned HOST_WIDE_INT) low <= 0x7fffffff && high == 0;
775 /* Do any needed setup for a variadic function. For the ARC, we must
776 create a register parameter block, and then copy any anonymous arguments
777 in registers to memory.
779 CUM has not been updated for the last named argument which has type TYPE
780 and mode MODE, and we rely on this fact.
782 We do things a little weirdly here. We're supposed to allocate space only
783 for the anonymous arguments. However, we need to keep the stack eight-byte
784 aligned, so we round the space up if necessary and leave it to va_start
785 to compensate. */
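/* Illustrative sketch (assuming the usual ARC convention of passing the
   first arguments in r0 upward):

       int f (int a, int b, ...);

   CUM still counts only `a', so adding the words for `b' gives
   first_anon_arg == 2, and the code below dumps the remaining argument
   registers into the register parameter block, padding by one word when
   needed to keep the stack 8-byte aligned.  */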
787 static void
788 arc_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
789 enum machine_mode mode,
790 tree type ATTRIBUTE_UNUSED,
791 int *pretend_size,
792 int no_rtl)
794 int first_anon_arg;
796 /* All BLKmode values are passed by reference. */
797 gcc_assert (mode != BLKmode);
799 first_anon_arg = *cum + ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1)
800 / UNITS_PER_WORD);
802 if (first_anon_arg < MAX_ARC_PARM_REGS && !no_rtl)
804 /* Note that first_reg_offset < MAX_ARC_PARM_REGS. */
805 int first_reg_offset = first_anon_arg;
806 /* Size in words to "pretend" allocate. */
807 int size = MAX_ARC_PARM_REGS - first_reg_offset;
808 /* Extra slop to keep stack eight byte aligned. */
809 int align_slop = size & 1;
810 rtx regblock;
812 regblock = gen_rtx_MEM (BLKmode,
813 plus_constant (arg_pointer_rtx,
814 FIRST_PARM_OFFSET (0)
815 + align_slop * UNITS_PER_WORD));
816 set_mem_alias_set (regblock, get_varargs_alias_set ());
817 set_mem_align (regblock, BITS_PER_WORD);
818 move_block_from_reg (first_reg_offset, regblock,
819 MAX_ARC_PARM_REGS - first_reg_offset);
821 *pretend_size = ((MAX_ARC_PARM_REGS - first_reg_offset + align_slop)
822 * UNITS_PER_WORD);
826 /* Cost functions. */
828 /* Compute a (partial) cost for rtx X. Return true if the complete
829 cost has been computed, and false if subexpressions should be
830 scanned. In either case, *TOTAL contains the cost result. */
832 static bool
833 arc_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
835 switch (code)
837 /* Small integers are as cheap as registers. 4 byte values can
838 be fetched as immediate constants - let's give that the cost
839 of an extra insn. */
840 case CONST_INT:
841 if (SMALL_INT (INTVAL (x)))
843 *total = 0;
844 return true;
846 /* FALLTHRU */
848 case CONST:
849 case LABEL_REF:
850 case SYMBOL_REF:
851 *total = COSTS_N_INSNS (1);
852 return true;
854 case CONST_DOUBLE:
856 rtx high, low;
857 split_double (x, &high, &low);
858 *total = COSTS_N_INSNS (!SMALL_INT (INTVAL (high))
859 + !SMALL_INT (INTVAL (low)));
860 return true;
863 /* Encourage synth_mult to find a synthetic multiply when reasonable.
864 If we need more than 12 insns to do a multiply, then go out-of-line,
865 since the call overhead will be < 10% of the cost of the multiply. */
866 case ASHIFT:
867 case ASHIFTRT:
868 case LSHIFTRT:
869 if (TARGET_SHIFTER)
870 *total = COSTS_N_INSNS (1);
871 else if (GET_CODE (XEXP (x, 1)) != CONST_INT)
872 *total = COSTS_N_INSNS (16);
873 else
874 *total = COSTS_N_INSNS (INTVAL (XEXP ((x), 1)));
875 return false;
877 default:
878 return false;
883 /* Provide the costs of an addressing mode that contains ADDR.
884 If ADDR is not a valid address, its cost is irrelevant. */
886 static int
887 arc_address_cost (rtx addr)
889 switch (GET_CODE (addr))
891 case REG :
892 return 1;
894 case LABEL_REF :
895 case SYMBOL_REF :
896 case CONST :
897 return 2;
899 case PLUS :
901 register rtx plus0 = XEXP (addr, 0);
902 register rtx plus1 = XEXP (addr, 1);
904 if (GET_CODE (plus0) != REG)
905 break;
907 switch (GET_CODE (plus1))
909 case CONST_INT :
910 return SMALL_INT (plus1) ? 1 : 2;
911 case CONST :
912 case SYMBOL_REF :
913 case LABEL_REF :
914 return 2;
915 default:
916 break;
918 break;
920 default:
921 break;
924 return 4;
927 /* Function prologue/epilogue handlers. */
929 /* ARC stack frames look like:
931 Before call After call
932 +-----------------------+ +-----------------------+
933 | | | |
934 high | local variables, | | local variables, |
935 mem | reg save area, etc. | | reg save area, etc. |
936 | | | |
937 +-----------------------+ +-----------------------+
938 | | | |
939 | arguments on stack. | | arguments on stack. |
940 | | | |
941 SP+16->+-----------------------+FP+48->+-----------------------+
942 | 4 word save area for | | reg parm save area, |
943 | return addr, prev %fp | | only created for |
944 SP+0->+-----------------------+ | variable argument |
945 | functions |
946 FP+16->+-----------------------+
947 | 4 word save area for |
948 | return addr, prev %fp |
949 FP+0->+-----------------------+
950 | |
951 | local variables |
952 | |
953 +-----------------------+
954 | |
955 | register save area |
956 | |
957 +-----------------------+
958 | |
959 | alloca allocations |
960 | |
961 +-----------------------+
962 | |
963 | arguments on stack |
964 | |
965 SP+16->+-----------------------+
966 low | 4 word save area for |
967 memory | return addr, prev %fp |
968 SP+0->+-----------------------+
970 Notes:
971 1) The "reg parm save area" does not exist for non variable argument fns.
972 The "reg parm save area" can be eliminated completely if we created our
973 own va-arc.h, but that has tradeoffs as well (so it's not done). */
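/* For orientation, the prologue emitted by arc_output_function_prologue for
   a simple non-variadic function that needs a frame pointer looks roughly
   like this (an editorial sketch; register choices and sizes depend on the
   function):

       st blink,[sp,4]   ; save return address (link register saved first)
       st fp,[sp]        ; save previous frame pointer
       mov fp,sp         ; establish the new frame pointer
       sub sp,sp,N       ; allocate the rest of the frame
       st r13,[sp,M]     ; save any call-saved registers that were used
*/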
975 /* Structure to be filled in by arc_compute_frame_size with register
976 save masks, and offsets for the current function. */
977 struct arc_frame_info
979 unsigned int total_size; /* # bytes that the entire frame takes up. */
980 unsigned int extra_size; /* # bytes of extra stuff. */
981 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
982 unsigned int args_size; /* # bytes that outgoing arguments take up. */
983 unsigned int reg_size; /* # bytes needed to store regs. */
984 unsigned int var_size; /* # bytes that variables take up. */
985 unsigned int reg_offset; /* Offset from new sp to store regs. */
986 unsigned int gmask; /* Mask of saved gp registers. */
987 int initialized; /* Nonzero if frame size already calculated. */
990 /* Current frame information calculated by arc_compute_frame_size. */
991 static struct arc_frame_info current_frame_info;
993 /* Zero structure to initialize current_frame_info. */
994 static struct arc_frame_info zero_frame_info;
996 /* Type of function DECL.
998 The result is cached. To reset the cache at the end of a function,
999 call with DECL = NULL_TREE. */
1001 enum arc_function_type
1002 arc_compute_function_type (tree decl)
1004 tree a;
1005 /* Cached value. */
1006 static enum arc_function_type fn_type = ARC_FUNCTION_UNKNOWN;
1007 /* Last function we were called for. */
1008 static tree last_fn = NULL_TREE;
1010 /* Resetting the cached value? */
1011 if (decl == NULL_TREE)
1013 fn_type = ARC_FUNCTION_UNKNOWN;
1014 last_fn = NULL_TREE;
1015 return fn_type;
1018 if (decl == last_fn && fn_type != ARC_FUNCTION_UNKNOWN)
1019 return fn_type;
1021 /* Assume we have a normal function (not an interrupt handler). */
1022 fn_type = ARC_FUNCTION_NORMAL;
1024 /* Now see if this is an interrupt handler. */
1025 for (a = DECL_ATTRIBUTES (current_function_decl);
1027 a = TREE_CHAIN (a))
1029 tree name = TREE_PURPOSE (a), args = TREE_VALUE (a);
1031 if (name == get_identifier ("__interrupt__")
1032 && list_length (args) == 1
1033 && TREE_CODE (TREE_VALUE (args)) == STRING_CST)
1035 tree value = TREE_VALUE (args);
1037 if (!strcmp (TREE_STRING_POINTER (value), "ilink1"))
1038 fn_type = ARC_FUNCTION_ILINK1;
1039 else if (!strcmp (TREE_STRING_POINTER (value), "ilink2"))
1040 fn_type = ARC_FUNCTION_ILINK2;
1041 else
1042 gcc_unreachable ();
1043 break;
1047 last_fn = decl;
1048 return fn_type;
1051 #define ILINK1_REGNUM 29
1052 #define ILINK2_REGNUM 30
1053 #define RETURN_ADDR_REGNUM 31
1054 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1055 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1057 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1058 The return address and frame pointer are treated separately.
1059 Don't consider them here. */
1060 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1061 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1062 && (df_regs_ever_live_p (regno) && (!call_used_regs[regno] || interrupt_p)))
1064 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM))
1066 /* Return the bytes needed to compute the frame pointer from the current
1067 stack pointer.
1069 SIZE is the size needed for local variables. */
1071 unsigned int
1072 arc_compute_frame_size (int size /* # of var. bytes allocated. */)
1074 int regno;
1075 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1076 unsigned int reg_size, reg_offset;
1077 unsigned int gmask;
1078 enum arc_function_type fn_type;
1079 int interrupt_p;
1081 var_size = size;
1082 args_size = crtl->outgoing_args_size;
1083 pretend_size = crtl->args.pretend_args_size;
1084 extra_size = FIRST_PARM_OFFSET (0);
1085 total_size = extra_size + pretend_size + args_size + var_size;
1086 reg_offset = FIRST_PARM_OFFSET(0) + crtl->outgoing_args_size;
1087 reg_size = 0;
1088 gmask = 0;
1090 /* See if this is an interrupt handler. Call used registers must be saved
1091 for them too. */
1092 fn_type = arc_compute_function_type (current_function_decl);
1093 interrupt_p = ARC_INTERRUPT_P (fn_type);
1095 /* Calculate space needed for registers.
1096 ??? We ignore the extension registers for now. */
1098 for (regno = 0; regno <= 31; regno++)
1100 if (MUST_SAVE_REGISTER (regno, interrupt_p))
1102 reg_size += UNITS_PER_WORD;
1103 gmask |= 1 << regno;
1107 total_size += reg_size;
1109 /* If the only space to allocate is the fp/blink save area, this is an
1110 empty frame. However, if we'll be making a function call, we need to
1111 allocate a stack frame for our callee's fp/blink save area. */
1112 if (total_size == extra_size
1113 && !MUST_SAVE_RETURN_ADDR)
1114 total_size = extra_size = 0;
1116 total_size = ARC_STACK_ALIGN (total_size);
1118 /* Save computed information. */
1119 current_frame_info.total_size = total_size;
1120 current_frame_info.extra_size = extra_size;
1121 current_frame_info.pretend_size = pretend_size;
1122 current_frame_info.var_size = var_size;
1123 current_frame_info.args_size = args_size;
1124 current_frame_info.reg_size = reg_size;
1125 current_frame_info.reg_offset = reg_offset;
1126 current_frame_info.gmask = gmask;
1127 current_frame_info.initialized = reload_completed;
1129 /* Ok, we're done. */
1130 return total_size;
1133 /* Common code to save/restore registers. */
1135 void
1136 arc_save_restore (FILE *file,
1137 const char *base_reg,
1138 unsigned int offset,
1139 unsigned int gmask,
1140 const char *op)
1142 int regno;
1144 if (gmask == 0)
1145 return;
1147 for (regno = 0; regno <= 31; regno++)
1149 if ((gmask & (1L << regno)) != 0)
1151 fprintf (file, "\t%s %s,[%s,%d]\n",
1152 op, reg_names[regno], base_reg, offset);
1153 offset += UNITS_PER_WORD;
1158 /* Target hook to assemble an integer object. The ARC version needs to
1159 emit a special directive for references to labels and function
1160 symbols. */
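/* For instance, a word-sized, aligned reference to a function `foo' comes
   out as

       .word %st(foo)

   rather than a plain .word directive (see the fputs calls below).  */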
1162 static bool
1163 arc_assemble_integer (rtx x, unsigned int size, int aligned_p)
1165 if (size == UNITS_PER_WORD && aligned_p
1166 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
1167 || GET_CODE (x) == LABEL_REF))
1169 fputs ("\t.word\t%st(", asm_out_file);
1170 output_addr_const (asm_out_file, x);
1171 fputs (")\n", asm_out_file);
1172 return true;
1174 return default_assemble_integer (x, size, aligned_p);
1177 /* Set up the stack and frame pointer (if desired) for the function. */
1179 static void
1180 arc_output_function_prologue (FILE *file, HOST_WIDE_INT size)
1182 const char *sp_str = reg_names[STACK_POINTER_REGNUM];
1183 const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1184 unsigned int gmask = current_frame_info.gmask;
1185 enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1187 /* If this is an interrupt handler, set up our stack frame.
1188 ??? Optimize later. */
1189 if (ARC_INTERRUPT_P (fn_type))
1191 fprintf (file, "\t%s interrupt handler\n",
1192 ASM_COMMENT_START);
1193 fprintf (file, "\tsub %s,%s,16\n", sp_str, sp_str);
1196 /* This is only for the human reader. */
1197 fprintf (file, "\t%s BEGIN PROLOGUE %s vars= %d, regs= %d, args= %d, extra= %d\n",
1198 ASM_COMMENT_START, ASM_COMMENT_START,
1199 current_frame_info.var_size,
1200 current_frame_info.reg_size / 4,
1201 current_frame_info.args_size,
1202 current_frame_info.extra_size);
1204 size = ARC_STACK_ALIGN (size);
1205 size = (! current_frame_info.initialized
1206 ? arc_compute_frame_size (size)
1207 : current_frame_info.total_size);
1209 /* These cases shouldn't happen. Catch them now. */
1210 gcc_assert (size || !gmask);
1212 /* Allocate space for register arguments if this is a variadic function. */
1213 if (current_frame_info.pretend_size != 0)
1214 fprintf (file, "\tsub %s,%s,%d\n",
1215 sp_str, sp_str, current_frame_info.pretend_size);
1217 /* The home-grown ABI says the link register is saved first. */
1218 if (MUST_SAVE_RETURN_ADDR)
1219 fprintf (file, "\tst %s,[%s,%d]\n",
1220 reg_names[RETURN_ADDR_REGNUM], sp_str, UNITS_PER_WORD);
1222 /* Set up the previous frame pointer next (if we need to). */
1223 if (frame_pointer_needed)
1225 fprintf (file, "\tst %s,[%s]\n", fp_str, sp_str);
1226 fprintf (file, "\tmov %s,%s\n", fp_str, sp_str);
1229 /* ??? We don't handle the case where the saved regs are more than 252
1230 bytes away from sp. This can be handled by decrementing sp once, saving
1231 the regs, and then decrementing it again. The epilogue doesn't have this
1232 problem as the `ld' insn takes reg+limm values (though it would be more
1233 efficient to avoid reg+limm). */
1235 /* Allocate the stack frame. */
1236 if (size - current_frame_info.pretend_size > 0)
1237 fprintf (file, "\tsub %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
1238 sp_str, sp_str, size - current_frame_info.pretend_size);
1240 /* Save any needed call-saved regs (and call-used if this is an
1241 interrupt handler). */
1242 arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1243 /* The zeroing of these two bits is unnecessary,
1244 but leave this in for clarity. */
1245 gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1246 "st");
1248 fprintf (file, "\t%s END PROLOGUE\n", ASM_COMMENT_START);
1251 /* Do any necessary cleanup after a function to restore stack, frame,
1252 and regs. */
1254 static void
1255 arc_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
1257 rtx epilogue_delay = crtl->epilogue_delay_list;
1258 int noepilogue = FALSE;
1259 enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1261 /* This is only for the human reader. */
1262 fprintf (file, "\t%s EPILOGUE\n", ASM_COMMENT_START);
1264 size = ARC_STACK_ALIGN (size);
1265 size = (!current_frame_info.initialized
1266 ? arc_compute_frame_size (size)
1267 : current_frame_info.total_size);
1269 if (size == 0 && epilogue_delay == 0)
1271 rtx insn = get_last_insn ();
1273 /* If the last insn was a BARRIER, we don't have to write any code
1274 because a jump (aka return) was put there. */
1275 if (GET_CODE (insn) == NOTE)
1276 insn = prev_nonnote_insn (insn);
1277 if (insn && GET_CODE (insn) == BARRIER)
1278 noepilogue = TRUE;
1281 if (!noepilogue)
1283 unsigned int pretend_size = current_frame_info.pretend_size;
1284 unsigned int frame_size = size - pretend_size;
1285 int restored, fp_restored_p;
1286 int can_trust_sp_p = !cfun->calls_alloca;
1287 const char *sp_str = reg_names[STACK_POINTER_REGNUM];
1288 const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1290 /* ??? There are lots of optimizations that can be done here.
1291 E.g.: use fp to restore regs if it's closer.
1292 Maybe in time we'll do them all. For now, always restore regs from
1293 sp, but don't restore sp if we don't have to. */
1295 if (!can_trust_sp_p)
1297 gcc_assert (frame_pointer_needed);
1298 fprintf (file,"\tsub %s,%s,%d\t\t%s sp not trusted here\n",
1299 sp_str, fp_str, frame_size, ASM_COMMENT_START);
1302 /* Restore any saved registers. */
1303 arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1304 /* The zeroing of these two bits is unnecessary,
1305 but leave this in for clarity. */
1306 current_frame_info.gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1307 "ld");
1309 if (MUST_SAVE_RETURN_ADDR)
1310 fprintf (file, "\tld %s,[%s,%d]\n",
1311 reg_names[RETURN_ADDR_REGNUM],
1312 frame_pointer_needed ? fp_str : sp_str,
1313 UNITS_PER_WORD + (frame_pointer_needed ? 0 : frame_size));
1315 /* Keep track of how much of the stack pointer we've restored.
1316 It makes the following a lot more readable. */
1317 restored = 0;
1318 fp_restored_p = 0;
1320 /* We try to emit the epilogue delay slot insn right after the load
1321 of the return address register so that it can execute with the
1322 stack intact. Secondly, loads are delayed. */
1323 /* ??? If stack intactness is important, always emit now. */
1324 if (MUST_SAVE_RETURN_ADDR && epilogue_delay != NULL_RTX)
1326 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
1327 epilogue_delay = NULL_RTX;
1330 if (frame_pointer_needed)
1332 /* Try to restore the frame pointer in the delay slot. We can't,
1333 however, if any of these is true. */
1334 if (epilogue_delay != NULL_RTX
1335 || !SMALL_INT (frame_size)
1336 || pretend_size
1337 || ARC_INTERRUPT_P (fn_type))
1339 /* Note that we restore fp and sp here! */
1340 fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1341 restored += frame_size;
1342 fp_restored_p = 1;
1345 else if (!SMALL_INT (size /* frame_size + pretend_size */)
1346 || ARC_INTERRUPT_P (fn_type))
1348 fprintf (file, "\tadd %s,%s,%d\n", sp_str, sp_str, frame_size);
1349 restored += frame_size;
1352 /* These must be done before the return insn because the delay slot
1353 does the final stack restore. */
1354 if (ARC_INTERRUPT_P (fn_type))
1356 if (epilogue_delay)
1358 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
1362 /* Emit the return instruction. */
1364 static const int regs[4] = {
1365 0, RETURN_ADDR_REGNUM, ILINK1_REGNUM, ILINK2_REGNUM
1368 /* Update the flags, if returning from an interrupt handler. */
1369 if (ARC_INTERRUPT_P (fn_type))
1370 fprintf (file, "\tj.d.f %s\n", reg_names[regs[fn_type]]);
1371 else
1372 fprintf (file, "\tj.d %s\n", reg_names[regs[fn_type]]);
1375 /* If the only register saved is the return address, we need a
1376 nop, unless we have an instruction to put into it. Otherwise
1377 we don't since reloading multiple registers doesn't reference
1378 the register being loaded. */
1380 if (ARC_INTERRUPT_P (fn_type))
1381 fprintf (file, "\tadd %s,%s,16\n", sp_str, sp_str);
1382 else if (epilogue_delay != NULL_RTX)
1384 gcc_assert (!frame_pointer_needed || fp_restored_p);
1385 gcc_assert (restored >= size);
1386 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
1388 else if (frame_pointer_needed && !fp_restored_p)
1390 gcc_assert (SMALL_INT (frame_size));
1391 /* Note that we restore fp and sp here! */
1392 fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1394 else if (restored < size)
1396 gcc_assert (SMALL_INT (size - restored));
1397 fprintf (file, "\tadd %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
1398 sp_str, sp_str, size - restored);
1400 else
1401 fprintf (file, "\tnop\n");
1404 /* Reset state info for each function. */
1405 current_frame_info = zero_frame_info;
1406 arc_compute_function_type (NULL_TREE);
1409 /* Define the number of delay slots needed for the function epilogue.
1411 Interrupt handlers can't have any epilogue delay slots (it's always needed
1412 for something else, I think). For normal functions, we have to worry about
1413 using call-saved regs as they'll be restored before the delay slot insn.
1414 Functions with non-empty frames already have enough choices for the epilogue
1415 delay slot so for now we only consider functions with empty frames. */
1418 arc_delay_slots_for_epilogue (void)
1420 if (arc_compute_function_type (current_function_decl) != ARC_FUNCTION_NORMAL)
1421 return 0;
1422 if (!current_frame_info.initialized)
1423 (void) arc_compute_frame_size (get_frame_size ());
1424 if (current_frame_info.total_size == 0)
1425 return 1;
1426 return 0;
1429 /* Return true if TRIAL is a valid insn for the epilogue delay slot.
1430 Any single length instruction which doesn't reference the stack or frame
1431 pointer or any call-saved register is OK. SLOT will always be 0. */
1434 arc_eligible_for_epilogue_delay (rtx trial, int slot)
1436 gcc_assert (!slot);
1438 if (get_attr_length (trial) == 1
1439 /* If registers were saved, presumably there's more than enough
1440 possibilities for the delay slot. The alternative is something
1441 more complicated (of course, if we expanded the epilogue as rtl
1442 this problem would go away). */
1443 /* ??? Note that this will always be true since only functions with
1444 empty frames have epilogue delay slots. See
1445 arc_delay_slots_for_epilogue. */
1446 && current_frame_info.gmask == 0
1447 && ! reg_mentioned_p (stack_pointer_rtx, PATTERN (trial))
1448 && ! reg_mentioned_p (frame_pointer_rtx, PATTERN (trial)))
1449 return 1;
1450 return 0;
1453 /* Return true if OP is a shift operator. */
1456 shift_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1458 switch (GET_CODE (op))
1460 case ASHIFTRT:
1461 case LSHIFTRT:
1462 case ASHIFT:
1463 return 1;
1464 default:
1465 return 0;
1469 /* Output the assembler code for doing a shift.
1470 We go to a bit of trouble to generate efficient code as the ARC only has
1471 single-bit shifts. This is taken from the h8300 port. We only have one
1472 mode of shifting and can't access individual bytes like the h8300 can, so
1473 this is greatly simplified (at the expense of not generating hyper-
1474 efficient code).
1476 This function is not used if the variable shift insns are present. */
1478 /* ??? We assume the output operand is the same as operand 1.
1479 This can be optimized (deleted) in the case of 1 bit shifts. */
1480 /* ??? We use the loop register here. We don't use it elsewhere (yet) and
1481 using it here will give us a chance to play with it. */
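/* For example (editorial summary of the cases below): a constant shift by 3
   simply expands to three single-bit shifts ("asl %0,%0" and friends), a
   shift by BITS_PER_WORD - 1 uses a rotate or carry trick, and larger or
   non-constant counts fall back to the lp_count/lp_start/lp_end loop
   emitted at the end of the function.  */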
1483 const char *
1484 output_shift (rtx *operands)
1486 rtx shift = operands[3];
1487 enum machine_mode mode = GET_MODE (shift);
1488 enum rtx_code code = GET_CODE (shift);
1489 const char *shift_one;
1491 gcc_assert (mode == SImode);
1493 switch (code)
1495 case ASHIFT: shift_one = "asl %0,%0"; break;
1496 case ASHIFTRT: shift_one = "asr %0,%0"; break;
1497 case LSHIFTRT: shift_one = "lsr %0,%0"; break;
1498 default: gcc_unreachable ();
1501 if (GET_CODE (operands[2]) != CONST_INT)
1503 if (optimize)
1505 output_asm_insn ("sub.f 0,%2,0", operands);
1506 output_asm_insn ("mov lp_count,%2", operands);
1507 output_asm_insn ("bz 2f", operands);
1509 else
1510 output_asm_insn ("mov %4,%2", operands);
1511 goto shiftloop;
1513 else
1515 int n = INTVAL (operands[2]);
1517 /* If the count is negative, make it 0. */
1518 if (n < 0)
1519 n = 0;
1520 /* If the count is too big, truncate it.
1521 ANSI says shifts of GET_MODE_BITSIZE are undefined - we choose to
1522 do the intuitive thing. */
1523 else if (n > GET_MODE_BITSIZE (mode))
1524 n = GET_MODE_BITSIZE (mode);
1526 /* First see if we can do them inline. */
1527 if (n <= 8)
1529 while (--n >= 0)
1530 output_asm_insn (shift_one, operands);
1532 /* See if we can use a rotate/and. */
1533 else if (n == BITS_PER_WORD - 1)
1535 switch (code)
1537 case ASHIFT :
1538 output_asm_insn ("and %0,%0,1\n\tror %0,%0", operands);
1539 break;
1540 case ASHIFTRT :
1541 /* The ARC doesn't have a rol insn. Use something else. */
1542 output_asm_insn ("asl.f 0,%0\n\tsbc %0,0,0", operands);
1543 break;
1544 case LSHIFTRT :
1545 /* The ARC doesn't have a rol insn. Use something else. */
1546 output_asm_insn ("asl.f 0,%0\n\tadc %0,0,0", operands);
1547 break;
1548 default:
1549 break;
1552 /* Must loop. */
1553 else
1555 char buf[100];
1557 if (optimize)
1558 output_asm_insn ("mov lp_count,%c2", operands);
1559 else
1560 output_asm_insn ("mov %4,%c2", operands);
1561 shiftloop:
1562 if (optimize)
1564 if (flag_pic)
1565 sprintf (buf, "lr %%4,[status]\n\tadd %%4,%%4,6\t%s single insn loop start",
1566 ASM_COMMENT_START);
1567 else
1568 sprintf (buf, "mov %%4,%%%%st(1f)\t%s (single insn loop start) >> 2",
1569 ASM_COMMENT_START);
1570 output_asm_insn (buf, operands);
1571 output_asm_insn ("sr %4,[lp_start]", operands);
1572 output_asm_insn ("add %4,%4,1", operands);
1573 output_asm_insn ("sr %4,[lp_end]", operands);
1574 output_asm_insn ("nop\n\tnop", operands);
1575 if (flag_pic)
1576 fprintf (asm_out_file, "\t%s single insn loop\n",
1577 ASM_COMMENT_START);
1578 else
1579 fprintf (asm_out_file, "1:\t%s single insn loop\n",
1580 ASM_COMMENT_START);
1581 output_asm_insn (shift_one, operands);
1582 fprintf (asm_out_file, "2:\t%s end single insn loop\n",
1583 ASM_COMMENT_START);
1585 else
1587 fprintf (asm_out_file, "1:\t%s begin shift loop\n",
1588 ASM_COMMENT_START);
1589 output_asm_insn ("sub.f %4,%4,1", operands);
1590 output_asm_insn ("nop", operands);
1591 output_asm_insn ("bn.nd 2f", operands);
1592 output_asm_insn (shift_one, operands);
1593 output_asm_insn ("b.nd 1b", operands);
1594 fprintf (asm_out_file, "2:\t%s end shift loop\n",
1595 ASM_COMMENT_START);
1600 return "";
1603 /* Nested function support. */
1605 /* Emit RTL insns to initialize the variable parts of a trampoline.
1606 FNADDR is an RTX for the address of the function's pure code.
1607 CXT is an RTX for the static chain value for the function. */
1609 void
1610 arc_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
1611 rtx fnaddr ATTRIBUTE_UNUSED,
1612 rtx cxt ATTRIBUTE_UNUSED)
1616 /* Set the cpu type and print out other fancy things,
1617 at the top of the file. */
1619 static void
1620 arc_file_start (void)
1622 default_file_start ();
1623 fprintf (asm_out_file, "\t.cpu %s\n", arc_cpu_string);
1626 /* Print operand X (an rtx) in assembler syntax to file FILE.
1627 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
1628 For `%' followed by punctuation, CODE is the punctuation and X is null. */
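/* Quick reference (editorial summary of the cases below): `%R' prints the
   second register or word of a DImode/DFmode operand, `%H'/`%L' the high/low
   word of a constant or register pair, `%U' the ".a" update suffix for
   pre-inc/dec addresses, `%V' the ".di" cache-bypass suffix for volatile
   memory, and `%d'/`%D' a condition code or its inverse.  */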
1630 void
1631 arc_print_operand (FILE *file, rtx x, int code)
1633 switch (code)
1635 case '#' :
1636 /* Conditional branches. For now these are equivalent. */
1637 case '*' :
1638 /* Unconditional branches. Output the appropriate delay slot suffix. */
1639 if (!final_sequence || XVECLEN (final_sequence, 0) == 1)
1641 /* There's nothing in the delay slot. */
1642 fputs (".nd", file);
1644 else
1646 rtx jump = XVECEXP (final_sequence, 0, 0);
1647 rtx delay = XVECEXP (final_sequence, 0, 1);
1648 if (INSN_ANNULLED_BRANCH_P (jump))
1649 fputs (INSN_FROM_TARGET_P (delay) ? ".jd" : ".nd", file);
1650 else
1651 fputs (".d", file);
1653 return;
1654 case '?' : /* with leading "." */
1655 case '!' : /* without leading "." */
1656 /* This insn can be conditionally executed. See if the ccfsm machinery
1657 says it should be conditionalized. */
1658 if (arc_ccfsm_state == 3 || arc_ccfsm_state == 4)
1660 /* Is this insn in a delay slot? */
1661 if (final_sequence && XVECLEN (final_sequence, 0) == 2)
1663 rtx insn = XVECEXP (final_sequence, 0, 1);
1665 /* If the insn is annulled and is from the target path, we need
1666 to invert the condition test. */
1667 if (INSN_ANNULLED_BRANCH_P (insn))
1669 if (INSN_FROM_TARGET_P (insn))
1670 fprintf (file, "%s%s",
1671 code == '?' ? "." : "",
1672 arc_condition_codes[ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc)]);
1673 else
1674 fprintf (file, "%s%s",
1675 code == '?' ? "." : "",
1676 arc_condition_codes[arc_ccfsm_current_cc]);
1678 else
1680 /* This insn is executed for either path, so don't
1681 conditionalize it at all. */
1682 ; /* nothing to do */
1685 else
1687 /* This insn isn't in a delay slot. */
1688 fprintf (file, "%s%s",
1689 code == '?' ? "." : "",
1690 arc_condition_codes[arc_ccfsm_current_cc]);
1693 return;
1694 case '~' :
1695 /* Output a nop if we're between a set of the condition codes,
1696 and a conditional branch. */
1697 if (last_insn_set_cc_p)
1698 fputs ("nop\n\t", file);
1699 return;
1700 case 'd' :
1701 fputs (arc_condition_codes[get_arc_condition_code (x)], file);
1702 return;
1703 case 'D' :
1704 fputs (arc_condition_codes[ARC_INVERSE_CONDITION_CODE
1705 (get_arc_condition_code (x))],
1706 file);
1707 return;
1708 case 'R' :
1709 /* Write second word of DImode or DFmode reference,
1710 register or memory. */
1711 if (GET_CODE (x) == REG)
1712 fputs (reg_names[REGNO (x)+1], file);
1713 else if (GET_CODE (x) == MEM)
1715 fputc ('[', file);
1716 /* Handle possible auto-increment. Since it is pre-increment and
1717 we have already done it, we can just use an offset of four. */
1718 /* ??? This is taken from rs6000.c I think. I don't think it is
1719 currently necessary, but keep it around. */
1720 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1721 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1722 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
1723 else
1724 output_address (plus_constant (XEXP (x, 0), 4));
1725 fputc (']', file);
1727 else
1728 output_operand_lossage ("invalid operand to %%R code");
1729 return;
1730 case 'S' :
1731 if ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
1732 || GET_CODE (x) == LABEL_REF)
1734 fprintf (file, "%%st(");
1735 output_addr_const (file, x);
1736 fprintf (file, ")");
1737 return;
1739 break;
1740 case 'H' :
1741 case 'L' :
1742 if (GET_CODE (x) == REG)
1744 /* L = least significant word, H = most significant word */
1745 if ((TARGET_BIG_ENDIAN != 0) ^ (code == 'L'))
1746 fputs (reg_names[REGNO (x)], file);
1747 else
1748 fputs (reg_names[REGNO (x)+1], file);
1750 else if (GET_CODE (x) == CONST_INT
1751 || GET_CODE (x) == CONST_DOUBLE)
1753 rtx first, second;
1755 split_double (x, &first, &second);
1756 fprintf (file, "0x%08lx",
1757 (long)(code == 'L' ? INTVAL (first) : INTVAL (second)));
1759 else
1760 output_operand_lossage ("invalid operand to %%H/%%L code");
1761 return;
1762 case 'A' :
1764 char str[30];
1766 gcc_assert (GET_CODE (x) == CONST_DOUBLE
1767 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT);
1769 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
1770 fprintf (file, "%s", str);
1771 return;
1773 case 'U' :
1774 /* Output a load/store with update indicator if appropriate. */
1775 if (GET_CODE (x) == MEM)
1777 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1778 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1779 fputs (".a", file);
1781 else
1782 output_operand_lossage ("invalid operand to %%U code");
1783 return;
1784 case 'V' :
1785 /* Output cache bypass indicator for a load/store insn. Volatile memory
1786 refs are defined to use the cache bypass mechanism. */
1787 if (GET_CODE (x) == MEM)
1789 if (MEM_VOLATILE_P (x))
1790 fputs (".di", file);
1792 else
1793 output_operand_lossage ("invalid operand to %%V code");
1794 return;
1795 case 0 :
1796 /* Do nothing special. */
1797 break;
1798 default :
1799 /* Unknown flag. */
1800 output_operand_lossage ("invalid operand output code");
1803 switch (GET_CODE (x))
1805 case REG :
1806 fputs (reg_names[REGNO (x)], file);
1807 break;
1808 case MEM :
1809 fputc ('[', file);
1810 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
1811 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1812 GET_MODE_SIZE (GET_MODE (x))));
1813 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
1814 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1815 - GET_MODE_SIZE (GET_MODE (x))));
1816 else
1817 output_address (XEXP (x, 0));
1818 fputc (']', file);
1819 break;
1820 case CONST_DOUBLE :
1821 /* We handle SFmode constants here as output_addr_const doesn't. */
1822 if (GET_MODE (x) == SFmode)
1824 REAL_VALUE_TYPE d;
1825 long l;
1827 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1828 REAL_VALUE_TO_TARGET_SINGLE (d, l);
1829 fprintf (file, "0x%08lx", l);
1830 break;
1832 /* Fall through. Let output_addr_const deal with it. */
1833 default :
1834 output_addr_const (file, x);
1835 break;
1839 /* Print a memory address as an operand to reference that memory location. */
1841 void
1842 arc_print_operand_address (FILE *file, rtx addr)
1844 register rtx base, index = 0;
1845 int offset = 0;
1847 switch (GET_CODE (addr))
1849 case REG :
1850 fputs (reg_names[REGNO (addr)], file);
1851 break;
1852 case SYMBOL_REF :
1853 if (/*???*/ 0 && SYMBOL_REF_FUNCTION_P (addr))
1855 fprintf (file, "%%st(");
1856 output_addr_const (file, addr);
1857 fprintf (file, ")");
1859 else
1860 output_addr_const (file, addr);
1861 break;
1862 case PLUS :
1863 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
1864 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
1865 else if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
1866 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
1867 else
1868 base = XEXP (addr, 0), index = XEXP (addr, 1);
1869 gcc_assert (GET_CODE (base) == REG);
1870 fputs (reg_names[REGNO (base)], file);
1871 if (index == 0)
1873 if (offset != 0)
1874 fprintf (file, ",%d", offset);
1876 else
1878 switch (GET_CODE (index))
1880 case REG:
1881 fprintf (file, ",%s", reg_names[REGNO (index)]);
1882 break;
1883 case SYMBOL_REF:
1884 fputc (',', file), output_addr_const (file, index);
1885 break;
1886 default:
1887 gcc_unreachable ();
1890 break;
1891 case PRE_INC :
1892 case PRE_DEC :
1893 /* We shouldn't get here as we've lost the mode of the memory object
1894 (which says how much to inc/dec by). */
1895 gcc_unreachable ();
1896 break;
1897 default :
1898 output_addr_const (file, addr);
1899 break;
1903 /* Update compare/branch separation marker. */
1905 static void
1906 record_cc_ref (rtx insn)
1908 last_insn_set_cc_p = current_insn_set_cc_p;
1910 switch (get_attr_cond (insn))
1912 case COND_SET :
1913 case COND_SET_ZN :
1914 case COND_SET_ZNC :
1915 if (get_attr_length (insn) == 1)
1916 current_insn_set_cc_p = 1;
1917 else
1918 current_insn_set_cc_p = 0;
1919 break;
1920 default :
1921 current_insn_set_cc_p = 0;
1922 break;
1926 /* Conditional execution support.
1928 This is based on the ARM port but for now is much simpler.
1930 A finite state machine takes care of noticing whether or not instructions
1931 can be conditionally executed, and thus decreases execution time and code
1932 size by deleting branch instructions. The fsm is controlled by
1933 final_prescan_insn, and controls the actions of PRINT_OPERAND. The patterns
1934 in the .md file for the branch insns also have a hand in this. */
1936 /* The states of the fsm controlling condition codes are:
1937 0: normal, do nothing special
1938 1: don't output this insn
1939 2: don't output this insn
1940 3: make insns conditional
1941 4: make insns conditional
1943 State transitions (state->state by whom, under what condition):
1944 0 -> 1 final_prescan_insn, if insn is conditional branch
1945 0 -> 2 final_prescan_insn, if the `target' is an unconditional branch
1946 1 -> 3 branch patterns, after having not output the conditional branch
1947 2 -> 4 branch patterns, after having not output the conditional branch
1948 3 -> 0 (*targetm.asm_out.internal_label), if the `target' label is reached
1949 (the target label has CODE_LABEL_NUMBER equal to
1950 arc_ccfsm_target_label).
1951 4 -> 0 final_prescan_insn, if `target' unconditional branch is reached
1953 If the jump clobbers the conditions then we use states 2 and 4.
1955 A similar thing can be done with conditional return insns.
1957 We also handle separating branches from sets of the condition code.
1958 This is done here because knowledge of the ccfsm state is required;
1959 we may not be outputting the branch.  */
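/* Illustrative sketch, not from the original sources (the assembler
   mnemonics are assumed for exposition): for source such as

       if (x == 0)
         y = 1;

   rather than emitting a conditional branch around the assignment, e.g.

       bne .L1
       mov r1,1
   .L1:

   the fsm notes the branch (state 1), the branch pattern suppresses it
   (state 3), and the skipped insn is then printed conditionalized,
   roughly "mov.eq r1,1", using arc_ccfsm_current_cc.  */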
1961 void
1962 arc_final_prescan_insn (rtx insn,
1963 rtx *opvec ATTRIBUTE_UNUSED,
1964 int noperands ATTRIBUTE_UNUSED)
1966 /* BODY will hold the body of INSN. */
1967 register rtx body = PATTERN (insn);
1969 /* This will be 1 if trying to repeat the trick (i.e.: do the `else' part of
1970 an if/then/else), and things need to be reversed. */
1971 int reverse = 0;
1973 /* If we start with a return insn, we only succeed if we find another one. */
1974 int seeking_return = 0;
1976 /* START_INSN will hold the insn from where we start looking. This is the
1977 first insn after the following code_label if REVERSE is true. */
1978 rtx start_insn = insn;
1980 /* Update compare/branch separation marker. */
1981 record_cc_ref (insn);
1983 /* Allow -mdebug-ccfsm to turn this off so we can see how well it does.
1984 We can't do this in macro FINAL_PRESCAN_INSN because it's called from
1985 final_scan_insn, which has `optimize' as a local.  */
1986 if (optimize < 2 || TARGET_NO_COND_EXEC)
1987 return;
1989 /* If in state 4, check if the target branch is reached, in order to
1990 change back to state 0. */
1991 if (arc_ccfsm_state == 4)
1993 if (insn == arc_ccfsm_target_insn)
1995 arc_ccfsm_target_insn = NULL;
1996 arc_ccfsm_state = 0;
1998 return;
2001 /* If in state 3, it is possible to repeat the trick, if this insn is an
2002 unconditional branch to a label, and immediately following this branch
2003 is the previous target label which is only used once, and the label this
2004 branch jumps to is not too far off. Or in other words "we've done the
2005 `then' part, see if we can do the `else' part." */
2006 if (arc_ccfsm_state == 3)
2008 if (simplejump_p (insn))
2010 start_insn = next_nonnote_insn (start_insn);
2011 if (GET_CODE (start_insn) == BARRIER)
2013 /* ??? Isn't this always a barrier? */
2014 start_insn = next_nonnote_insn (start_insn);
2016 if (GET_CODE (start_insn) == CODE_LABEL
2017 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2018 && LABEL_NUSES (start_insn) == 1)
2019 reverse = TRUE;
2020 else
2021 return;
2023 else if (GET_CODE (body) == RETURN)
2025 start_insn = next_nonnote_insn (start_insn);
2026 if (GET_CODE (start_insn) == BARRIER)
2027 start_insn = next_nonnote_insn (start_insn);
2028 if (GET_CODE (start_insn) == CODE_LABEL
2029 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2030 && LABEL_NUSES (start_insn) == 1)
2032 reverse = TRUE;
2033 seeking_return = 1;
2035 else
2036 return;
2038 else
2039 return;
2042 if (GET_CODE (insn) != JUMP_INSN)
2043 return;
2045 /* This jump might be in a PARALLEL with a clobber of the condition codes;
2046 the jump should always come first.  */
2047 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
2048 body = XVECEXP (body, 0, 0);
2050 if (reverse
2051 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
2052 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
2054 int insns_skipped = 0, fail = FALSE, succeed = FALSE;
2055 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
2056 int then_not_else = TRUE;
2057 /* Nonzero if next insn must be the target label. */
2058 int next_must_be_target_label_p;
2059 rtx this_insn = start_insn, label = 0;
2061 /* Register the insn jumped to. */
2062 if (reverse)
2064 if (!seeking_return)
2065 label = XEXP (SET_SRC (body), 0);
2067 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
2068 label = XEXP (XEXP (SET_SRC (body), 1), 0);
2069 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
2071 label = XEXP (XEXP (SET_SRC (body), 2), 0);
2072 then_not_else = FALSE;
2074 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
2075 seeking_return = 1;
2076 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
2078 seeking_return = 1;
2079 then_not_else = FALSE;
2081 else
2082 gcc_unreachable ();
2084 /* See how many insns this branch skips, and what kind of insns. If all
2085 insns are okay, and the label or unconditional branch to the same
2086 label is not too far away, succeed. */
2087 for (insns_skipped = 0, next_must_be_target_label_p = FALSE;
2088 !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
2089 insns_skipped++)
2091 rtx scanbody;
2093 this_insn = next_nonnote_insn (this_insn);
2094 if (!this_insn)
2095 break;
2097 if (next_must_be_target_label_p)
2099 if (GET_CODE (this_insn) == BARRIER)
2100 continue;
2101 if (GET_CODE (this_insn) == CODE_LABEL
2102 && this_insn == label)
2104 arc_ccfsm_state = 1;
2105 succeed = TRUE;
2107 else
2108 fail = TRUE;
2109 break;
2112 scanbody = PATTERN (this_insn);
2114 switch (GET_CODE (this_insn))
2116 case CODE_LABEL:
2117 /* Succeed if it is the target label; otherwise fail, since
2118 control falls in from somewhere else.  */
2119 if (this_insn == label)
2121 arc_ccfsm_state = 1;
2122 succeed = TRUE;
2124 else
2125 fail = TRUE;
2126 break;
2128 case BARRIER:
2129 /* Succeed if the following insn is the target label.
2130 Otherwise fail.
2131 If return insns are used then the last insn in a function
2132 will be a barrier. */
2133 next_must_be_target_label_p = TRUE;
2134 break;
2136 case CALL_INSN:
2137 /* We can handle a call insn if there are no insns after it,
2138 i.e., the next "insn" is the target label.  We don't have to
2139 worry about delay slots, as such insns are SEQUENCEs inside
2140 INSNs.  ??? It is possible to handle such insns though.  */
2141 if (get_attr_cond (this_insn) == COND_CANUSE)
2142 next_must_be_target_label_p = TRUE;
2143 else
2144 fail = TRUE;
2145 break;
2147 case JUMP_INSN:
2148 /* If this is an unconditional branch to the same label, succeed.
2149 If it is to another label, do nothing. If it is conditional,
2150 fail. */
2151 /* ??? Probably, the tests for the SET and the PC are unnecessary.  */
2153 if (GET_CODE (scanbody) == SET
2154 && GET_CODE (SET_DEST (scanbody)) == PC)
2156 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
2157 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
2159 arc_ccfsm_state = 2;
2160 succeed = TRUE;
2162 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
2163 fail = TRUE;
2165 else if (GET_CODE (scanbody) == RETURN
2166 && seeking_return)
2168 arc_ccfsm_state = 2;
2169 succeed = TRUE;
2171 else if (GET_CODE (scanbody) == PARALLEL)
2173 if (get_attr_cond (this_insn) != COND_CANUSE)
2174 fail = TRUE;
2176 break;
2178 case INSN:
2179 /* We can only do this with insns that can use the condition
2180 codes (and don't set them). */
2181 if (GET_CODE (scanbody) == SET
2182 || GET_CODE (scanbody) == PARALLEL)
2184 if (get_attr_cond (this_insn) != COND_CANUSE)
2185 fail = TRUE;
2187 /* We can't handle other insns like sequences. */
2188 else
2189 fail = TRUE;
2190 break;
2192 default:
2193 break;
2197 if (succeed)
2199 if ((!seeking_return) && (arc_ccfsm_state == 1 || reverse))
2200 arc_ccfsm_target_label = CODE_LABEL_NUMBER (label);
2201 else
2203 gcc_assert (seeking_return || arc_ccfsm_state == 2);
2204 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
2206 this_insn = next_nonnote_insn (this_insn);
2207 gcc_assert (!this_insn
2208 || (GET_CODE (this_insn) != BARRIER
2209 && GET_CODE (this_insn) != CODE_LABEL));
2211 if (!this_insn)
2213 /* Oh dear!  We ran off the end; give up.  */
2214 extract_insn_cached (insn);
2215 arc_ccfsm_state = 0;
2216 arc_ccfsm_target_insn = NULL;
2217 return;
2219 arc_ccfsm_target_insn = this_insn;
2222 /* If REVERSE is true, arc_ccfsm_current_cc needs to be inverted from
2223 what it was.  */
2224 if (!reverse)
2225 arc_ccfsm_current_cc = get_arc_condition_code (XEXP (SET_SRC (body),
2226 0));
2228 if (reverse || then_not_else)
2229 arc_ccfsm_current_cc = ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc);
2232 /* Restore recog_data. Getting the attributes of other insns can
2233 destroy this array, but final.c assumes that it remains intact
2234 across this call. */
2235 extract_insn_cached (insn);
2239 /* Record that we are currently outputting label NUM with prefix PREFIX.
2240 If it's the label we're looking for, reset the ccfsm machinery.
2242 Called from (*targetm.asm_out.internal_label). */
2244 void
2245 arc_ccfsm_at_label (const char *prefix, int num)
2247 if (arc_ccfsm_state == 3 && arc_ccfsm_target_label == num
2248 && !strcmp (prefix, "L"))
2250 arc_ccfsm_state = 0;
2251 arc_ccfsm_target_insn = NULL_RTX;
2255 /* See if the current insn, which is a conditional branch, is to be
2256 deleted. */
2258 int
2259 arc_ccfsm_branch_deleted_p (void)
2261 if (arc_ccfsm_state == 1 || arc_ccfsm_state == 2)
2262 return 1;
2263 return 0;
2266 /* Record that a branch isn't output because subsequent insns can be
2267 conditionalized. */
2269 void
2270 arc_ccfsm_record_branch_deleted (void)
2272 /* Indicate we're conditionalizing insns now. */
2273 arc_ccfsm_state += 2;
2275 /* If the next insn is a subroutine call, we still need a nop between the
2276 cc setter and user. We need to undo the effect of calling record_cc_ref
2277 for the just deleted branch. */
2278 current_insn_set_cc_p = last_insn_set_cc_p;
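/* Illustrative note, not from the original sources: this is presumably
   reached from the branch output templates once arc_ccfsm_branch_deleted_p
   has indicated the branch is to be deleted, so the "+= 2" above is exactly
   the 1 -> 3 and 2 -> 4 transitions described in the state-machine comment
   earlier in this file.  */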
2281 static void
2282 arc_va_start (tree valist, rtx nextarg)
2284 /* See arc_setup_incoming_varargs for reasons for this oddity. */
2285 if (crtl->args.info < 8
2286 && (crtl->args.info & 1))
2287 nextarg = plus_constant (nextarg, UNITS_PER_WORD);
2289 std_expand_builtin_va_start (valist, nextarg);
2292 /* This is how to output a definition of an internal numbered label where
2293 PREFIX is the class of label and NUM is the number within the class. */
2295 static void
2296 arc_internal_label (FILE *stream, const char *prefix, unsigned long labelno)
2298 arc_ccfsm_at_label (prefix, labelno);
2299 default_internal_label (stream, prefix, labelno);
2302 /* Worker function for TARGET_ASM_EXTERNAL_LIBCALL. */
2304 static void
2305 arc_external_libcall (rtx fun ATTRIBUTE_UNUSED)
2307 #if 0
2308 /* On the ARC we want to have libgcc's for multiple cpus in one binary.
2309 We can't use `assemble_name' here as that will call ASM_OUTPUT_LABELREF
2310 and we'll get another suffix added on if -mmangle-cpu. */
2311 if (TARGET_MANGLE_CPU_LIBGCC)
2313 fprintf (FILE, "\t.rename\t_%s, _%s%s\n",
2314 XSTR (SYMREF, 0), XSTR (SYMREF, 0),
2315 arc_mangle_suffix);
2317 #endif
2320 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2322 static bool
2323 arc_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2325 if (AGGREGATE_TYPE_P (type))
2326 return true;
2327 else
2329 HOST_WIDE_INT size = int_size_in_bytes (type);
2330 return (size == -1 || size > 8);
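/* Illustrative note, not from the original sources: with the test above,
   any aggregate, however small, is returned in memory, as is any scalar
   wider than 8 bytes or of variable size (int_size_in_bytes returns -1);
   an 8-byte DImode or DFmode value is still returned in registers.  */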
2334 /* For ARC, all aggregates and arguments greater than 8 bytes are
2335 passed by reference. */
2337 static bool
2338 arc_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
2339 enum machine_mode mode, const_tree type,
2340 bool named ATTRIBUTE_UNUSED)
2342 unsigned HOST_WIDE_INT size;
2344 if (type)
2346 if (AGGREGATE_TYPE_P (type))
2347 return true;
2348 size = int_size_in_bytes (type);
2350 else
2351 size = GET_MODE_SIZE (mode);
2353 return size > 8;
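/* Illustrative note, not from the original sources: by the test above, a
   4-byte struct is passed by reference (AGGREGATE_TYPE_P), a 12-byte
   argument is passed by reference (size > 8), while an 8-byte double with
   no type information (e.g. from a libcall) falls back to GET_MODE_SIZE
   and is passed by value.  */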