Merge from mainline (163495:164578).
[official-gcc/graphite-test-results.git] / gcc / config / arc / arc.c
blob5031b99e2f7939e882e7d0e580a2f21e4c40de1d
1 /* Subroutines used for code generation on the Argonaut ARC cpu.
2 Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2003,
3 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* ??? This is an old port, and is undoubtedly suffering from bit rot. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "output.h"
34 #include "insn-attr.h"
35 #include "flags.h"
36 #include "function.h"
37 #include "expr.h"
38 #include "recog.h"
39 #include "diagnostic-core.h"
40 #include "toplev.h"
41 #include "df.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "target-def.h"
46 /* Which cpu we're compiling for. */
47 int arc_cpu_type;
49 /* Name of mangle string to add to symbols to separate code compiled for each
50 cpu (or NULL). */
51 const char *arc_mangle_cpu;
53 /* Name of text, data, and rodata sections used in varasm.c. */
54 const char *arc_text_section;
55 const char *arc_data_section;
56 const char *arc_rodata_section;
58 /* Array of valid operand punctuation characters. */
59 char arc_punct_chars[256];
61 /* Variables used by arc_final_prescan_insn to implement conditional
62 execution. */
63 static int arc_ccfsm_state;
64 static int arc_ccfsm_current_cc;
65 static rtx arc_ccfsm_target_insn;
66 static int arc_ccfsm_target_label;
68 /* The maximum number of insns skipped which will be conditionalised if
69 possible. */
70 #define MAX_INSNS_SKIPPED 3
72 /* A nop is needed between a 4 byte insn that sets the condition codes and
73 a branch that uses them (the same isn't true for an 8 byte insn that sets
74 the condition codes). Set by arc_final_prescan_insn. Used by
75 arc_print_operand. */
76 static int last_insn_set_cc_p;
77 static int current_insn_set_cc_p;
78 static bool arc_handle_option (size_t, const char *, int);
79 static void record_cc_ref (rtx);
80 static void arc_init_reg_tables (void);
81 static int get_arc_condition_code (rtx);
82 static tree arc_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
83 static bool arc_assemble_integer (rtx, unsigned int, int);
84 static void arc_output_function_prologue (FILE *, HOST_WIDE_INT);
85 static void arc_output_function_epilogue (FILE *, HOST_WIDE_INT);
86 static void arc_file_start (void);
87 static void arc_internal_label (FILE *, const char *, unsigned long);
88 static void arc_va_start (tree, rtx);
89 static void arc_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
90 tree, int *, int);
91 static bool arc_rtx_costs (rtx, int, int, int *, bool);
92 static int arc_address_cost (rtx, bool);
93 static void arc_external_libcall (rtx);
94 static bool arc_return_in_memory (const_tree, const_tree);
95 static bool arc_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
96 const_tree, bool);
97 static void arc_trampoline_init (rtx, tree, rtx);
98 static void arc_option_override (void);
101 /* ARC specific attributs. */
103 static const struct attribute_spec arc_attribute_table[] =
105 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
106 { "interrupt", 1, 1, true, false, false, arc_handle_interrupt_attribute },
107 { NULL, 0, 0, false, false, false, NULL }
110 /* Initialize the GCC target structure. */
111 #undef TARGET_ASM_ALIGNED_HI_OP
112 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
113 #undef TARGET_ASM_ALIGNED_SI_OP
114 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
115 #undef TARGET_ASM_INTEGER
116 #define TARGET_ASM_INTEGER arc_assemble_integer
118 #undef TARGET_ASM_FUNCTION_PROLOGUE
119 #define TARGET_ASM_FUNCTION_PROLOGUE arc_output_function_prologue
120 #undef TARGET_ASM_FUNCTION_EPILOGUE
121 #define TARGET_ASM_FUNCTION_EPILOGUE arc_output_function_epilogue
122 #undef TARGET_ASM_FILE_START
123 #define TARGET_ASM_FILE_START arc_file_start
124 #undef TARGET_ATTRIBUTE_TABLE
125 #define TARGET_ATTRIBUTE_TABLE arc_attribute_table
126 #undef TARGET_ASM_INTERNAL_LABEL
127 #define TARGET_ASM_INTERNAL_LABEL arc_internal_label
128 #undef TARGET_ASM_EXTERNAL_LIBCALL
129 #define TARGET_ASM_EXTERNAL_LIBCALL arc_external_libcall
131 #undef TARGET_HANDLE_OPTION
132 #define TARGET_HANDLE_OPTION arc_handle_option
134 #undef TARGET_OPTION_OVERRIDE
135 #define TARGET_OPTION_OVERRIDE arc_option_override
137 #undef TARGET_RTX_COSTS
138 #define TARGET_RTX_COSTS arc_rtx_costs
139 #undef TARGET_ADDRESS_COST
140 #define TARGET_ADDRESS_COST arc_address_cost
142 #undef TARGET_PROMOTE_FUNCTION_MODE
143 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
144 #undef TARGET_PROMOTE_PROTOTYPES
145 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
147 #undef TARGET_RETURN_IN_MEMORY
148 #define TARGET_RETURN_IN_MEMORY arc_return_in_memory
149 #undef TARGET_PASS_BY_REFERENCE
150 #define TARGET_PASS_BY_REFERENCE arc_pass_by_reference
151 #undef TARGET_CALLEE_COPIES
152 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
154 #undef TARGET_SETUP_INCOMING_VARARGS
155 #define TARGET_SETUP_INCOMING_VARARGS arc_setup_incoming_varargs
157 #undef TARGET_EXPAND_BUILTIN_VA_START
158 #define TARGET_EXPAND_BUILTIN_VA_START arc_va_start
160 #undef TARGET_TRAMPOLINE_INIT
161 #define TARGET_TRAMPOLINE_INIT arc_trampoline_init
163 struct gcc_target targetm = TARGET_INITIALIZER;
165 /* Implement TARGET_HANDLE_OPTION. */
167 static bool
168 arc_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
170 switch (code)
172 case OPT_mcpu_:
173 return strcmp (arg, "base") == 0 || ARC_EXTENSION_CPU (arg);
175 default:
176 return true;
180 /* Implement TARGET_OPTION_OVERRIDE.
181 These need to be done at start up. It's convenient to do them here. */
183 static void
184 arc_option_override (void)
186 char *tmp;
188 /* Set the pseudo-ops for the various standard sections. */
189 arc_text_section = tmp = XNEWVEC (char, strlen (arc_text_string) + sizeof (ARC_SECTION_FORMAT) + 1);
190 sprintf (tmp, ARC_SECTION_FORMAT, arc_text_string);
191 arc_data_section = tmp = XNEWVEC (char, strlen (arc_data_string) + sizeof (ARC_SECTION_FORMAT) + 1);
192 sprintf (tmp, ARC_SECTION_FORMAT, arc_data_string);
193 arc_rodata_section = tmp = XNEWVEC (char, strlen (arc_rodata_string) + sizeof (ARC_SECTION_FORMAT) + 1);
194 sprintf (tmp, ARC_SECTION_FORMAT, arc_rodata_string);
196 arc_init_reg_tables ();
198 /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P. */
199 memset (arc_punct_chars, 0, sizeof (arc_punct_chars));
200 arc_punct_chars['#'] = 1;
201 arc_punct_chars['*'] = 1;
202 arc_punct_chars['?'] = 1;
203 arc_punct_chars['!'] = 1;
204 arc_punct_chars['~'] = 1;
207 /* The condition codes of the ARC, and the inverse function. */
208 static const char *const arc_condition_codes[] =
210 "al", 0, "eq", "ne", "p", "n", "c", "nc", "v", "nv",
211 "gt", "le", "ge", "lt", "hi", "ls", "pnz", 0
214 #define ARC_INVERSE_CONDITION_CODE(X) ((X) ^ 1)
216 /* Returns the index of the ARC condition code string in
217 `arc_condition_codes'. COMPARISON should be an rtx like
218 `(eq (...) (...))'. */
220 static int
221 get_arc_condition_code (rtx comparison)
223 switch (GET_CODE (comparison))
225 case EQ : return 2;
226 case NE : return 3;
227 case GT : return 10;
228 case LE : return 11;
229 case GE : return 12;
230 case LT : return 13;
231 case GTU : return 14;
232 case LEU : return 15;
233 case LTU : return 6;
234 case GEU : return 7;
235 default : gcc_unreachable ();
237 /*NOTREACHED*/
238 return (42);
241 /* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
242 return the mode to be used for the comparison. */
244 enum machine_mode
245 arc_select_cc_mode (enum rtx_code op,
246 rtx x ATTRIBUTE_UNUSED,
247 rtx y ATTRIBUTE_UNUSED)
249 switch (op)
251 case EQ :
252 case NE :
253 return CCZNmode;
254 default :
255 switch (GET_CODE (x))
257 case AND :
258 case IOR :
259 case XOR :
260 case SIGN_EXTEND :
261 case ZERO_EXTEND :
262 return CCZNmode;
263 case ASHIFT :
264 case ASHIFTRT :
265 case LSHIFTRT :
266 return CCZNCmode;
267 default:
268 break;
271 return CCmode;
274 /* Vectors to keep interesting information about registers where it can easily
275 be got. We use to use the actual mode value as the bit number, but there
276 is (or may be) more than 32 modes now. Instead we use two tables: one
277 indexed by hard register number, and one indexed by mode. */
279 /* The purpose of arc_mode_class is to shrink the range of modes so that
280 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
281 mapped into one arc_mode_class mode. */
283 enum arc_mode_class {
284 C_MODE,
285 S_MODE, D_MODE, T_MODE, O_MODE,
286 SF_MODE, DF_MODE, TF_MODE, OF_MODE
289 /* Modes for condition codes. */
290 #define C_MODES (1 << (int) C_MODE)
292 /* Modes for single-word and smaller quantities. */
293 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
295 /* Modes for double-word and smaller quantities. */
296 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
298 /* Modes for quad-word and smaller quantities. */
299 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
301 /* Value is 1 if register/mode pair is acceptable on arc. */
303 const unsigned int arc_hard_regno_mode_ok[] = {
304 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
305 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
306 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, D_MODES,
307 D_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
309 /* ??? Leave these as S_MODES for now. */
310 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
311 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
312 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
313 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, C_MODES
316 unsigned int arc_mode_class [NUM_MACHINE_MODES];
318 enum reg_class arc_regno_reg_class[FIRST_PSEUDO_REGISTER];
320 static void
321 arc_init_reg_tables (void)
323 int i;
325 for (i = 0; i < NUM_MACHINE_MODES; i++)
327 switch (GET_MODE_CLASS (i))
329 case MODE_INT:
330 case MODE_PARTIAL_INT:
331 case MODE_COMPLEX_INT:
332 if (GET_MODE_SIZE (i) <= 4)
333 arc_mode_class[i] = 1 << (int) S_MODE;
334 else if (GET_MODE_SIZE (i) == 8)
335 arc_mode_class[i] = 1 << (int) D_MODE;
336 else if (GET_MODE_SIZE (i) == 16)
337 arc_mode_class[i] = 1 << (int) T_MODE;
338 else if (GET_MODE_SIZE (i) == 32)
339 arc_mode_class[i] = 1 << (int) O_MODE;
340 else
341 arc_mode_class[i] = 0;
342 break;
343 case MODE_FLOAT:
344 case MODE_COMPLEX_FLOAT:
345 if (GET_MODE_SIZE (i) <= 4)
346 arc_mode_class[i] = 1 << (int) SF_MODE;
347 else if (GET_MODE_SIZE (i) == 8)
348 arc_mode_class[i] = 1 << (int) DF_MODE;
349 else if (GET_MODE_SIZE (i) == 16)
350 arc_mode_class[i] = 1 << (int) TF_MODE;
351 else if (GET_MODE_SIZE (i) == 32)
352 arc_mode_class[i] = 1 << (int) OF_MODE;
353 else
354 arc_mode_class[i] = 0;
355 break;
356 case MODE_CC:
357 arc_mode_class[i] = 1 << (int) C_MODE;
358 break;
359 default:
360 arc_mode_class[i] = 0;
361 break;
365 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
367 if (i < 60)
368 arc_regno_reg_class[i] = GENERAL_REGS;
369 else if (i == 60)
370 arc_regno_reg_class[i] = LPCOUNT_REG;
371 else if (i == 61)
372 arc_regno_reg_class[i] = NO_REGS /* CC_REG: must be NO_REGS */;
373 else
374 arc_regno_reg_class[i] = NO_REGS;
378 /* ARC specific attribute support.
380 The ARC has these attributes:
381 interrupt - for interrupt functions
384 /* Handle an "interrupt" attribute; arguments as in
385 struct attribute_spec.handler. */
386 static tree
387 arc_handle_interrupt_attribute (tree *node ATTRIBUTE_UNUSED,
388 tree name,
389 tree args,
390 int flags ATTRIBUTE_UNUSED,
391 bool *no_add_attrs)
393 tree value = TREE_VALUE (args);
395 if (TREE_CODE (value) != STRING_CST)
397 warning (OPT_Wattributes,
398 "argument of %qE attribute is not a string constant",
399 name);
400 *no_add_attrs = true;
402 else if (strcmp (TREE_STRING_POINTER (value), "ilink1")
403 && strcmp (TREE_STRING_POINTER (value), "ilink2"))
405 warning (OPT_Wattributes,
406 "argument of %qE attribute is not \"ilink1\" or \"ilink2\"",
407 name);
408 *no_add_attrs = true;
411 return NULL_TREE;
415 /* Acceptable arguments to the call insn. */
418 call_address_operand (rtx op, enum machine_mode mode)
420 return (symbolic_operand (op, mode)
421 || (GET_CODE (op) == CONST_INT && LEGITIMATE_CONSTANT_P (op))
422 || (GET_CODE (op) == REG));
426 call_operand (rtx op, enum machine_mode mode)
428 if (GET_CODE (op) != MEM)
429 return 0;
430 op = XEXP (op, 0);
431 return call_address_operand (op, mode);
434 /* Returns 1 if OP is a symbol reference. */
437 symbolic_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
439 switch (GET_CODE (op))
441 case SYMBOL_REF:
442 case LABEL_REF:
443 case CONST :
444 return 1;
445 default:
446 return 0;
450 /* Return truth value of statement that OP is a symbolic memory
451 operand of mode MODE. */
454 symbolic_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
456 if (GET_CODE (op) == SUBREG)
457 op = SUBREG_REG (op);
458 if (GET_CODE (op) != MEM)
459 return 0;
460 op = XEXP (op, 0);
461 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST
462 || GET_CODE (op) == LABEL_REF);
465 /* Return true if OP is a short immediate (shimm) value. */
468 short_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
470 if (GET_CODE (op) != CONST_INT)
471 return 0;
472 return SMALL_INT (INTVAL (op));
475 /* Return true if OP will require a long immediate (limm) value.
476 This is currently only used when calculating length attributes. */
479 long_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
481 switch (GET_CODE (op))
483 case SYMBOL_REF :
484 case LABEL_REF :
485 case CONST :
486 return 1;
487 case CONST_INT :
488 return !SMALL_INT (INTVAL (op));
489 case CONST_DOUBLE :
490 /* These can happen because large unsigned 32-bit constants are
491 represented this way (the multiplication patterns can cause these
492 to be generated). They also occur for SFmode values. */
493 return 1;
494 default:
495 break;
497 return 0;
500 /* Return true if OP is a MEM that when used as a load or store address will
501 require an 8 byte insn.
502 Load and store instructions don't allow the same possibilities but they're
503 similar enough that this one function will do.
504 This is currently only used when calculating length attributes. */
507 long_immediate_loadstore_operand (rtx op,
508 enum machine_mode mode ATTRIBUTE_UNUSED)
510 if (GET_CODE (op) != MEM)
511 return 0;
513 op = XEXP (op, 0);
514 switch (GET_CODE (op))
516 case SYMBOL_REF :
517 case LABEL_REF :
518 case CONST :
519 return 1;
520 case CONST_INT :
521 /* This must be handled as "st c,[limm]". Ditto for load.
522 Technically, the assembler could translate some possibilities to
523 "st c,[limm/2 + limm/2]" if limm/2 will fit in a shimm, but we don't
524 assume that it does. */
525 return 1;
526 case CONST_DOUBLE :
527 /* These can happen because large unsigned 32-bit constants are
528 represented this way (the multiplication patterns can cause these
529 to be generated). They also occur for SFmode values. */
530 return 1;
531 case REG :
532 return 0;
533 case PLUS :
534 if (GET_CODE (XEXP (op, 1)) == CONST_INT
535 && !SMALL_INT (INTVAL (XEXP (op, 1))))
536 return 1;
537 return 0;
538 default:
539 break;
541 return 0;
544 /* Return true if OP is an acceptable argument for a single word
545 move source. */
548 move_src_operand (rtx op, enum machine_mode mode)
550 switch (GET_CODE (op))
552 case SYMBOL_REF :
553 case LABEL_REF :
554 case CONST :
555 return 1;
556 case CONST_INT :
557 return (LARGE_INT (INTVAL (op)));
558 case CONST_DOUBLE :
559 /* We can handle DImode integer constants in SImode if the value
560 (signed or unsigned) will fit in 32 bits. This is needed because
561 large unsigned 32-bit constants are represented as CONST_DOUBLEs. */
562 if (mode == SImode)
563 return arc_double_limm_p (op);
564 /* We can handle 32-bit floating point constants. */
565 if (mode == SFmode)
566 return GET_MODE (op) == SFmode;
567 return 0;
568 case REG :
569 return register_operand (op, mode);
570 case SUBREG :
571 /* (subreg (mem ...) ...) can occur here if the inner part was once a
572 pseudo-reg and is now a stack slot. */
573 if (GET_CODE (SUBREG_REG (op)) == MEM)
574 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
575 else
576 return register_operand (op, mode);
577 case MEM :
578 return address_operand (XEXP (op, 0), mode);
579 default :
580 return 0;
584 /* Return true if OP is an acceptable argument for a double word
585 move source. */
588 move_double_src_operand (rtx op, enum machine_mode mode)
590 switch (GET_CODE (op))
592 case REG :
593 return register_operand (op, mode);
594 case SUBREG :
595 /* (subreg (mem ...) ...) can occur here if the inner part was once a
596 pseudo-reg and is now a stack slot. */
597 if (GET_CODE (SUBREG_REG (op)) == MEM)
598 return move_double_src_operand (SUBREG_REG (op), mode);
599 else
600 return register_operand (op, mode);
601 case MEM :
602 /* Disallow auto inc/dec for now. */
603 if (GET_CODE (XEXP (op, 0)) == PRE_DEC
604 || GET_CODE (XEXP (op, 0)) == PRE_INC)
605 return 0;
606 return address_operand (XEXP (op, 0), mode);
607 case CONST_INT :
608 case CONST_DOUBLE :
609 return 1;
610 default :
611 return 0;
615 /* Return true if OP is an acceptable argument for a move destination. */
618 move_dest_operand (rtx op, enum machine_mode mode)
620 switch (GET_CODE (op))
622 case REG :
623 return register_operand (op, mode);
624 case SUBREG :
625 /* (subreg (mem ...) ...) can occur here if the inner part was once a
626 pseudo-reg and is now a stack slot. */
627 if (GET_CODE (SUBREG_REG (op)) == MEM)
628 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
629 else
630 return register_operand (op, mode);
631 case MEM :
632 return address_operand (XEXP (op, 0), mode);
633 default :
634 return 0;
638 /* Return true if OP is valid load with update operand. */
641 load_update_operand (rtx op, enum machine_mode mode)
643 if (GET_CODE (op) != MEM
644 || GET_MODE (op) != mode)
645 return 0;
646 op = XEXP (op, 0);
647 if (GET_CODE (op) != PLUS
648 || GET_MODE (op) != Pmode
649 || !register_operand (XEXP (op, 0), Pmode)
650 || !nonmemory_operand (XEXP (op, 1), Pmode))
651 return 0;
652 return 1;
655 /* Return true if OP is valid store with update operand. */
658 store_update_operand (rtx op, enum machine_mode mode)
660 if (GET_CODE (op) != MEM
661 || GET_MODE (op) != mode)
662 return 0;
663 op = XEXP (op, 0);
664 if (GET_CODE (op) != PLUS
665 || GET_MODE (op) != Pmode
666 || !register_operand (XEXP (op, 0), Pmode)
667 || !(GET_CODE (XEXP (op, 1)) == CONST_INT
668 && SMALL_INT (INTVAL (XEXP (op, 1)))))
669 return 0;
670 return 1;
673 /* Return true if OP is a non-volatile non-immediate operand.
674 Volatile memory refs require a special "cache-bypass" instruction
675 and only the standard movXX patterns are set up to handle them. */
678 nonvol_nonimm_operand (rtx op, enum machine_mode mode)
680 if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
681 return 0;
682 return nonimmediate_operand (op, mode);
685 /* Accept integer operands in the range -0x80000000..0x7fffffff. We have
686 to check the range carefully since this predicate is used in DImode
687 contexts. */
690 const_sint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
692 /* All allowed constants will fit a CONST_INT. */
693 return (GET_CODE (op) == CONST_INT
694 && (INTVAL (op) >= (-0x7fffffff - 1) && INTVAL (op) <= 0x7fffffff));
697 /* Accept integer operands in the range 0..0xffffffff. We have to check the
698 range carefully since this predicate is used in DImode contexts. Also, we
699 need some extra crud to make it work when hosted on 64-bit machines. */
702 const_uint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
704 #if HOST_BITS_PER_WIDE_INT > 32
705 /* All allowed constants will fit a CONST_INT. */
706 return (GET_CODE (op) == CONST_INT
707 && (INTVAL (op) >= 0 && INTVAL (op) <= 0xffffffffL));
708 #else
709 return ((GET_CODE (op) == CONST_INT && INTVAL (op) >= 0)
710 || (GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_HIGH (op) == 0));
711 #endif
714 /* Return 1 if OP is a comparison operator valid for the mode of CC.
715 This allows the use of MATCH_OPERATOR to recognize all the branch insns.
717 Some insns only set a few bits in the condition code. So only allow those
718 comparisons that use the bits that are valid. */
721 proper_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
723 enum rtx_code code;
724 if (!COMPARISON_P (op))
725 return 0;
727 code = GET_CODE (op);
728 if (GET_MODE (XEXP (op, 0)) == CCZNmode)
729 return (code == EQ || code == NE);
730 if (GET_MODE (XEXP (op, 0)) == CCZNCmode)
731 return (code == EQ || code == NE
732 || code == LTU || code == GEU || code == GTU || code == LEU);
733 return 1;
736 /* Misc. utilities. */
738 /* X and Y are two things to compare using CODE. Return the rtx
739 for the cc reg in the proper mode. */
742 gen_compare_reg (enum rtx_code code, rtx x, rtx y)
744 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
745 return gen_rtx_REG (mode, 61);
748 /* Return 1 if VALUE, a const_double, will fit in a limm (4 byte number).
749 We assume the value can be either signed or unsigned. */
752 arc_double_limm_p (rtx value)
754 HOST_WIDE_INT low, high;
756 gcc_assert (GET_CODE (value) == CONST_DOUBLE);
758 low = CONST_DOUBLE_LOW (value);
759 high = CONST_DOUBLE_HIGH (value);
761 if (low & 0x80000000)
763 return (((unsigned HOST_WIDE_INT) low <= 0xffffffff && high == 0)
764 || (((low & - (unsigned HOST_WIDE_INT) 0x80000000)
765 == - (unsigned HOST_WIDE_INT) 0x80000000)
766 && high == -1));
768 else
770 return (unsigned HOST_WIDE_INT) low <= 0x7fffffff && high == 0;
774 /* Do any needed setup for a variadic function. For the ARC, we must
775 create a register parameter block, and then copy any anonymous arguments
776 in registers to memory.
778 CUM has not been updated for the last named argument which has type TYPE
779 and mode MODE, and we rely on this fact.
781 We do things a little weird here. We're supposed to only allocate space
782 for the anonymous arguments. However we need to keep the stack eight byte
783 aligned. So we round the space up if necessary, and leave it to va_start
784 to compensate. */
786 static void
787 arc_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
788 enum machine_mode mode,
789 tree type ATTRIBUTE_UNUSED,
790 int *pretend_size,
791 int no_rtl)
793 int first_anon_arg;
795 /* All BLKmode values are passed by reference. */
796 gcc_assert (mode != BLKmode);
798 first_anon_arg = *cum + ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1)
799 / UNITS_PER_WORD);
801 if (first_anon_arg < MAX_ARC_PARM_REGS && !no_rtl)
803 /* Note that first_reg_offset < MAX_ARC_PARM_REGS. */
804 int first_reg_offset = first_anon_arg;
805 /* Size in words to "pretend" allocate. */
806 int size = MAX_ARC_PARM_REGS - first_reg_offset;
807 /* Extra slop to keep stack eight byte aligned. */
808 int align_slop = size & 1;
809 rtx regblock;
811 regblock = gen_rtx_MEM (BLKmode,
812 plus_constant (arg_pointer_rtx,
813 FIRST_PARM_OFFSET (0)
814 + align_slop * UNITS_PER_WORD));
815 set_mem_alias_set (regblock, get_varargs_alias_set ());
816 set_mem_align (regblock, BITS_PER_WORD);
817 move_block_from_reg (first_reg_offset, regblock,
818 MAX_ARC_PARM_REGS - first_reg_offset);
820 *pretend_size = ((MAX_ARC_PARM_REGS - first_reg_offset + align_slop)
821 * UNITS_PER_WORD);
825 /* Cost functions. */
827 /* Compute a (partial) cost for rtx X. Return true if the complete
828 cost has been computed, and false if subexpressions should be
829 scanned. In either case, *TOTAL contains the cost result. */
831 static bool
832 arc_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total,
833 bool speed ATTRIBUTE_UNUSED)
835 switch (code)
837 /* Small integers are as cheap as registers. 4 byte values can
838 be fetched as immediate constants - let's give that the cost
839 of an extra insn. */
840 case CONST_INT:
841 if (SMALL_INT (INTVAL (x)))
843 *total = 0;
844 return true;
846 /* FALLTHRU */
848 case CONST:
849 case LABEL_REF:
850 case SYMBOL_REF:
851 *total = COSTS_N_INSNS (1);
852 return true;
854 case CONST_DOUBLE:
856 rtx high, low;
857 split_double (x, &high, &low);
858 *total = COSTS_N_INSNS (!SMALL_INT (INTVAL (high))
859 + !SMALL_INT (INTVAL (low)));
860 return true;
863 /* Encourage synth_mult to find a synthetic multiply when reasonable.
864 If we need more than 12 insns to do a multiply, then go out-of-line,
865 since the call overhead will be < 10% of the cost of the multiply. */
866 case ASHIFT:
867 case ASHIFTRT:
868 case LSHIFTRT:
869 if (TARGET_SHIFTER)
870 *total = COSTS_N_INSNS (1);
871 else if (GET_CODE (XEXP (x, 1)) != CONST_INT)
872 *total = COSTS_N_INSNS (16);
873 else
874 *total = COSTS_N_INSNS (INTVAL (XEXP ((x), 1)));
875 return false;
877 default:
878 return false;
883 /* Provide the costs of an addressing mode that contains ADDR.
884 If ADDR is not a valid address, its cost is irrelevant. */
886 static int
887 arc_address_cost (rtx addr, bool speed ATTRIBUTE_UNUSED)
889 switch (GET_CODE (addr))
891 case REG :
892 return 1;
894 case LABEL_REF :
895 case SYMBOL_REF :
896 case CONST :
897 return 2;
899 case PLUS :
901 register rtx plus0 = XEXP (addr, 0);
902 register rtx plus1 = XEXP (addr, 1);
904 if (GET_CODE (plus0) != REG)
905 break;
907 switch (GET_CODE (plus1))
909 case CONST_INT :
910 return SMALL_INT (INTVAL (plus1)) ? 1 : 2;
911 case CONST :
912 case SYMBOL_REF :
913 case LABEL_REF :
914 return 2;
915 default:
916 break;
918 break;
920 default:
921 break;
924 return 4;
927 /* Function prologue/epilogue handlers. */
929 /* ARC stack frames look like:
931 Before call After call
932 +-----------------------+ +-----------------------+
933 | | | |
934 high | local variables, | | local variables, |
935 mem | reg save area, etc. | | reg save area, etc. |
936 | | | |
937 +-----------------------+ +-----------------------+
938 | | | |
939 | arguments on stack. | | arguments on stack. |
940 | | | |
941 SP+16->+-----------------------+FP+48->+-----------------------+
942 | 4 word save area for | | reg parm save area, |
943 | return addr, prev %fp | | only created for |
944 SP+0->+-----------------------+ | variable argument |
945 | functions |
946 FP+16->+-----------------------+
947 | 4 word save area for |
948 | return addr, prev %fp |
949 FP+0->+-----------------------+
950 | |
951 | local variables |
952 | |
953 +-----------------------+
954 | |
955 | register save area |
956 | |
957 +-----------------------+
958 | |
959 | alloca allocations |
960 | |
961 +-----------------------+
962 | |
963 | arguments on stack |
964 | |
965 SP+16->+-----------------------+
966 low | 4 word save area for |
967 memory | return addr, prev %fp |
968 SP+0->+-----------------------+
970 Notes:
971 1) The "reg parm save area" does not exist for non variable argument fns.
972 The "reg parm save area" can be eliminated completely if we created our
973 own va-arc.h, but that has tradeoffs as well (so it's not done). */
975 /* Structure to be filled in by arc_compute_frame_size with register
976 save masks, and offsets for the current function. */
977 struct arc_frame_info
979 unsigned int total_size; /* # bytes that the entire frame takes up. */
980 unsigned int extra_size; /* # bytes of extra stuff. */
981 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
982 unsigned int args_size; /* # bytes that outgoing arguments take up. */
983 unsigned int reg_size; /* # bytes needed to store regs. */
984 unsigned int var_size; /* # bytes that variables take up. */
985 unsigned int reg_offset; /* Offset from new sp to store regs. */
986 unsigned int gmask; /* Mask of saved gp registers. */
987 int initialized; /* Nonzero if frame size already calculated. */
990 /* Current frame information calculated by arc_compute_frame_size. */
991 static struct arc_frame_info current_frame_info;
993 /* Zero structure to initialize current_frame_info. */
994 static struct arc_frame_info zero_frame_info;
996 /* Type of function DECL.
998 The result is cached. To reset the cache at the end of a function,
999 call with DECL = NULL_TREE. */
1001 enum arc_function_type
1002 arc_compute_function_type (tree decl)
1004 tree a;
1005 /* Cached value. */
1006 static enum arc_function_type fn_type = ARC_FUNCTION_UNKNOWN;
1007 /* Last function we were called for. */
1008 static tree last_fn = NULL_TREE;
1010 /* Resetting the cached value? */
1011 if (decl == NULL_TREE)
1013 fn_type = ARC_FUNCTION_UNKNOWN;
1014 last_fn = NULL_TREE;
1015 return fn_type;
1018 if (decl == last_fn && fn_type != ARC_FUNCTION_UNKNOWN)
1019 return fn_type;
1021 /* Assume we have a normal function (not an interrupt handler). */
1022 fn_type = ARC_FUNCTION_NORMAL;
1024 /* Now see if this is an interrupt handler. */
1025 for (a = DECL_ATTRIBUTES (current_function_decl);
1027 a = TREE_CHAIN (a))
1029 tree name = TREE_PURPOSE (a), args = TREE_VALUE (a);
1031 if (name == get_identifier ("__interrupt__")
1032 && list_length (args) == 1
1033 && TREE_CODE (TREE_VALUE (args)) == STRING_CST)
1035 tree value = TREE_VALUE (args);
1037 if (!strcmp (TREE_STRING_POINTER (value), "ilink1"))
1038 fn_type = ARC_FUNCTION_ILINK1;
1039 else if (!strcmp (TREE_STRING_POINTER (value), "ilink2"))
1040 fn_type = ARC_FUNCTION_ILINK2;
1041 else
1042 gcc_unreachable ();
1043 break;
1047 last_fn = decl;
1048 return fn_type;
1051 #define ILINK1_REGNUM 29
1052 #define ILINK2_REGNUM 30
1053 #define RETURN_ADDR_REGNUM 31
1054 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1055 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1057 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1058 The return address and frame pointer are treated separately.
1059 Don't consider them here. */
1060 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1061 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1062 && (df_regs_ever_live_p (regno) && (!call_used_regs[regno] || interrupt_p)))
1064 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM))
1066 /* Return the bytes needed to compute the frame pointer from the current
1067 stack pointer.
1069 SIZE is the size needed for local variables. */
1071 unsigned int
1072 arc_compute_frame_size (int size /* # of var. bytes allocated. */)
1074 int regno;
1075 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1076 unsigned int reg_size, reg_offset;
1077 unsigned int gmask;
1078 enum arc_function_type fn_type;
1079 int interrupt_p;
1081 var_size = size;
1082 args_size = crtl->outgoing_args_size;
1083 pretend_size = crtl->args.pretend_args_size;
1084 extra_size = FIRST_PARM_OFFSET (0);
1085 total_size = extra_size + pretend_size + args_size + var_size;
1086 reg_offset = FIRST_PARM_OFFSET(0) + crtl->outgoing_args_size;
1087 reg_size = 0;
1088 gmask = 0;
1090 /* See if this is an interrupt handler. Call used registers must be saved
1091 for them too. */
1092 fn_type = arc_compute_function_type (current_function_decl);
1093 interrupt_p = ARC_INTERRUPT_P (fn_type);
1095 /* Calculate space needed for registers.
1096 ??? We ignore the extension registers for now. */
1098 for (regno = 0; regno <= 31; regno++)
1100 if (MUST_SAVE_REGISTER (regno, interrupt_p))
1102 reg_size += UNITS_PER_WORD;
1103 gmask |= 1 << regno;
1107 total_size += reg_size;
1109 /* If the only space to allocate is the fp/blink save area this is an
1110 empty frame. However, if we'll be making a function call we need to
1111 allocate a stack frame for our callee's fp/blink save area. */
1112 if (total_size == extra_size
1113 && !MUST_SAVE_RETURN_ADDR)
1114 total_size = extra_size = 0;
1116 total_size = ARC_STACK_ALIGN (total_size);
1118 /* Save computed information. */
1119 current_frame_info.total_size = total_size;
1120 current_frame_info.extra_size = extra_size;
1121 current_frame_info.pretend_size = pretend_size;
1122 current_frame_info.var_size = var_size;
1123 current_frame_info.args_size = args_size;
1124 current_frame_info.reg_size = reg_size;
1125 current_frame_info.reg_offset = reg_offset;
1126 current_frame_info.gmask = gmask;
1127 current_frame_info.initialized = reload_completed;
1129 /* Ok, we're done. */
1130 return total_size;
1133 /* Common code to save/restore registers. */
1135 void
1136 arc_save_restore (FILE *file,
1137 const char *base_reg,
1138 unsigned int offset,
1139 unsigned int gmask,
1140 const char *op)
1142 int regno;
1144 if (gmask == 0)
1145 return;
1147 for (regno = 0; regno <= 31; regno++)
1149 if ((gmask & (1L << regno)) != 0)
1151 fprintf (file, "\t%s %s,[%s,%d]\n",
1152 op, reg_names[regno], base_reg, offset);
1153 offset += UNITS_PER_WORD;
1158 /* Target hook to assemble an integer object. The ARC version needs to
1159 emit a special directive for references to labels and function
1160 symbols. */
1162 static bool
1163 arc_assemble_integer (rtx x, unsigned int size, int aligned_p)
1165 if (size == UNITS_PER_WORD && aligned_p
1166 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
1167 || GET_CODE (x) == LABEL_REF))
1169 fputs ("\t.word\t%st(", asm_out_file);
1170 output_addr_const (asm_out_file, x);
1171 fputs (")\n", asm_out_file);
1172 return true;
1174 return default_assemble_integer (x, size, aligned_p);
1177 /* Set up the stack and frame pointer (if desired) for the function. */
1179 static void
1180 arc_output_function_prologue (FILE *file, HOST_WIDE_INT size)
1182 const char *sp_str = reg_names[STACK_POINTER_REGNUM];
1183 const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1184 unsigned int gmask = current_frame_info.gmask;
1185 enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1187 /* If this is an interrupt handler, set up our stack frame.
1188 ??? Optimize later. */
1189 if (ARC_INTERRUPT_P (fn_type))
1191 fprintf (file, "\t%s interrupt handler\n",
1192 ASM_COMMENT_START);
1193 fprintf (file, "\tsub %s,%s,16\n", sp_str, sp_str);
1196 /* This is only for the human reader. */
1197 fprintf (file, "\t%s BEGIN PROLOGUE %s vars= %d, regs= %d, args= %d, extra= %d\n",
1198 ASM_COMMENT_START, ASM_COMMENT_START,
1199 current_frame_info.var_size,
1200 current_frame_info.reg_size / 4,
1201 current_frame_info.args_size,
1202 current_frame_info.extra_size);
1204 size = ARC_STACK_ALIGN (size);
1205 size = (! current_frame_info.initialized
1206 ? arc_compute_frame_size (size)
1207 : current_frame_info.total_size);
1209 /* These cases shouldn't happen. Catch them now. */
1210 gcc_assert (size || !gmask);
1212 /* Allocate space for register arguments if this is a variadic function. */
1213 if (current_frame_info.pretend_size != 0)
1214 fprintf (file, "\tsub %s,%s,%d\n",
1215 sp_str, sp_str, current_frame_info.pretend_size);
1217 /* The home-grown ABI says link register is saved first. */
1218 if (MUST_SAVE_RETURN_ADDR)
1219 fprintf (file, "\tst %s,[%s,%d]\n",
1220 reg_names[RETURN_ADDR_REGNUM], sp_str, UNITS_PER_WORD);
1222 /* Set up the previous frame pointer next (if we need to). */
1223 if (frame_pointer_needed)
1225 fprintf (file, "\tst %s,[%s]\n", fp_str, sp_str);
1226 fprintf (file, "\tmov %s,%s\n", fp_str, sp_str);
1229 /* ??? We don't handle the case where the saved regs are more than 252
1230 bytes away from sp. This can be handled by decrementing sp once, saving
1231 the regs, and then decrementing it again. The epilogue doesn't have this
1232 problem as the `ld' insn takes reg+limm values (though it would be more
1233 efficient to avoid reg+limm). */
1235 /* Allocate the stack frame. */
1236 if (size - current_frame_info.pretend_size > 0)
1237 fprintf (file, "\tsub %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
1238 sp_str, sp_str, size - current_frame_info.pretend_size);
1240 /* Save any needed call-saved regs (and call-used if this is an
1241 interrupt handler). */
1242 arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1243 /* The zeroing of these two bits is unnecessary,
1244 but leave this in for clarity. */
1245 gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1246 "st");
1248 fprintf (file, "\t%s END PROLOGUE\n", ASM_COMMENT_START);
1251 /* Do any necessary cleanup after a function to restore stack, frame,
1252 and regs. */
1254 static void
1255 arc_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
1257 rtx epilogue_delay = crtl->epilogue_delay_list;
1258 int noepilogue = FALSE;
1259 enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1261 /* This is only for the human reader. */
1262 fprintf (file, "\t%s EPILOGUE\n", ASM_COMMENT_START);
1264 size = ARC_STACK_ALIGN (size);
1265 size = (!current_frame_info.initialized
1266 ? arc_compute_frame_size (size)
1267 : current_frame_info.total_size);
1269 if (size == 0 && epilogue_delay == 0)
1271 rtx insn = get_last_insn ();
1273 /* If the last insn was a BARRIER, we don't have to write any code
1274 because a jump (aka return) was put there. */
1275 if (GET_CODE (insn) == NOTE)
1276 insn = prev_nonnote_insn (insn);
1277 if (insn && GET_CODE (insn) == BARRIER)
1278 noepilogue = TRUE;
1281 if (!noepilogue)
1283 unsigned int pretend_size = current_frame_info.pretend_size;
1284 unsigned int frame_size = size - pretend_size;
1285 int restored, fp_restored_p;
1286 int can_trust_sp_p = !cfun->calls_alloca;
1287 const char *sp_str = reg_names[STACK_POINTER_REGNUM];
1288 const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1290 /* ??? There are lots of optimizations that can be done here.
1291 EG: Use fp to restore regs if it's closer.
1292 Maybe in time we'll do them all. For now, always restore regs from
1293 sp, but don't restore sp if we don't have to. */
1295 if (!can_trust_sp_p)
1297 gcc_assert (frame_pointer_needed);
1298 fprintf (file,"\tsub %s,%s,%d\t\t%s sp not trusted here\n",
1299 sp_str, fp_str, frame_size, ASM_COMMENT_START);
1302 /* Restore any saved registers. */
1303 arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1304 /* The zeroing of these two bits is unnecessary,
1305 but leave this in for clarity. */
1306 current_frame_info.gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1307 "ld");
1309 if (MUST_SAVE_RETURN_ADDR)
1310 fprintf (file, "\tld %s,[%s,%d]\n",
1311 reg_names[RETURN_ADDR_REGNUM],
1312 frame_pointer_needed ? fp_str : sp_str,
1313 UNITS_PER_WORD + (frame_pointer_needed ? 0 : frame_size));
1315 /* Keep track of how much of the stack pointer we've restored.
1316 It makes the following a lot more readable. */
1317 restored = 0;
1318 fp_restored_p = 0;
1320 /* We try to emit the epilogue delay slot insn right after the load
1321 of the return address register so that it can execute with the
1322 stack intact. Secondly, loads are delayed. */
1323 /* ??? If stack intactness is important, always emit now. */
1324 if (MUST_SAVE_RETURN_ADDR && epilogue_delay != NULL_RTX)
1326 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
1327 epilogue_delay = NULL_RTX;
1330 if (frame_pointer_needed)
1332 /* Try to restore the frame pointer in the delay slot. We can't,
1333 however, if any of these is true. */
1334 if (epilogue_delay != NULL_RTX
1335 || !SMALL_INT (frame_size)
1336 || pretend_size
1337 || ARC_INTERRUPT_P (fn_type))
1339 /* Note that we restore fp and sp here! */
1340 fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1341 restored += frame_size;
1342 fp_restored_p = 1;
1345 else if (!SMALL_INT (size /* frame_size + pretend_size */)
1346 || ARC_INTERRUPT_P (fn_type))
1348 fprintf (file, "\tadd %s,%s,%d\n", sp_str, sp_str, frame_size);
1349 restored += frame_size;
1352 /* These must be done before the return insn because the delay slot
1353 does the final stack restore. */
1354 if (ARC_INTERRUPT_P (fn_type))
1356 if (epilogue_delay)
1358 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
1362 /* Emit the return instruction. */
1364 static const int regs[4] = {
1365 0, RETURN_ADDR_REGNUM, ILINK1_REGNUM, ILINK2_REGNUM
1368 /* Update the flags, if returning from an interrupt handler. */
1369 if (ARC_INTERRUPT_P (fn_type))
1370 fprintf (file, "\tj.d.f %s\n", reg_names[regs[fn_type]]);
1371 else
1372 fprintf (file, "\tj.d %s\n", reg_names[regs[fn_type]]);
1375 /* If the only register saved is the return address, we need a
1376 nop, unless we have an instruction to put into it. Otherwise
1377 we don't since reloading multiple registers doesn't reference
1378 the register being loaded. */
1380 if (ARC_INTERRUPT_P (fn_type))
1381 fprintf (file, "\tadd %s,%s,16\n", sp_str, sp_str);
1382 else if (epilogue_delay != NULL_RTX)
1384 gcc_assert (!frame_pointer_needed || fp_restored_p);
1385 gcc_assert (restored >= size);
1386 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
1388 else if (frame_pointer_needed && !fp_restored_p)
1390 gcc_assert (SMALL_INT (frame_size));
1391 /* Note that we restore fp and sp here! */
1392 fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1394 else if (restored < size)
1396 gcc_assert (SMALL_INT (size - restored));
1397 fprintf (file, "\tadd %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
1398 sp_str, sp_str, size - restored);
1400 else
1401 fprintf (file, "\tnop\n");
1404 /* Reset state info for each function. */
1405 current_frame_info = zero_frame_info;
1406 arc_compute_function_type (NULL_TREE);
1409 /* Define the number of delay slots needed for the function epilogue.
1411 Interrupt handlers can't have any epilogue delay slots (it's always needed
1412 for something else, I think). For normal functions, we have to worry about
1413 using call-saved regs as they'll be restored before the delay slot insn.
1414 Functions with non-empty frames already have enough choices for the epilogue
1415 delay slot so for now we only consider functions with empty frames. */
1418 arc_delay_slots_for_epilogue (void)
1420 if (arc_compute_function_type (current_function_decl) != ARC_FUNCTION_NORMAL)
1421 return 0;
1422 if (!current_frame_info.initialized)
1423 (void) arc_compute_frame_size (get_frame_size ());
1424 if (current_frame_info.total_size == 0)
1425 return 1;
1426 return 0;
1429 /* Return true if TRIAL is a valid insn for the epilogue delay slot.
1430 Any single length instruction which doesn't reference the stack or frame
1431 pointer or any call-saved register is OK. SLOT will always be 0. */
1434 arc_eligible_for_epilogue_delay (rtx trial, int slot)
1436 gcc_assert (!slot);
1438 if (get_attr_length (trial) == 1
1439 /* If registers where saved, presumably there's more than enough
1440 possibilities for the delay slot. The alternative is something
1441 more complicated (of course, if we expanded the epilogue as rtl
1442 this problem would go away). */
1443 /* ??? Note that this will always be true since only functions with
1444 empty frames have epilogue delay slots. See
1445 arc_delay_slots_for_epilogue. */
1446 && current_frame_info.gmask == 0
1447 && ! reg_mentioned_p (stack_pointer_rtx, PATTERN (trial))
1448 && ! reg_mentioned_p (frame_pointer_rtx, PATTERN (trial)))
1449 return 1;
1450 return 0;
1453 /* Return true if OP is a shift operator. */
1456 shift_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1458 switch (GET_CODE (op))
1460 case ASHIFTRT:
1461 case LSHIFTRT:
1462 case ASHIFT:
1463 return 1;
1464 default:
1465 return 0;
1469 /* Output the assembler code for doing a shift.
1470 We go to a bit of trouble to generate efficient code as the ARC only has
1471 single bit shifts. This is taken from the h8300 port. We only have one
1472 mode of shifting and can't access individual bytes like the h8300 can, so
1473 this is greatly simplified (at the expense of not generating hyper-
1474 efficient code).
1476 This function is not used if the variable shift insns are present. */
1478 /* ??? We assume the output operand is the same as operand 1.
1479 This can be optimized (deleted) in the case of 1 bit shifts. */
1480 /* ??? We use the loop register here. We don't use it elsewhere (yet) and
1481 using it here will give us a chance to play with it. */
1483 const char *
1484 output_shift (rtx *operands)
1486 rtx shift = operands[3];
1487 enum machine_mode mode = GET_MODE (shift);
1488 enum rtx_code code = GET_CODE (shift);
1489 const char *shift_one;
1491 gcc_assert (mode == SImode);
1493 switch (code)
1495 case ASHIFT: shift_one = "asl %0,%0"; break;
1496 case ASHIFTRT: shift_one = "asr %0,%0"; break;
1497 case LSHIFTRT: shift_one = "lsr %0,%0"; break;
1498 default: gcc_unreachable ();
1501 if (GET_CODE (operands[2]) != CONST_INT)
1503 if (optimize)
1505 output_asm_insn ("sub.f 0,%2,0", operands);
1506 output_asm_insn ("mov lp_count,%2", operands);
1507 output_asm_insn ("bz 2f", operands);
1509 else
1510 output_asm_insn ("mov %4,%2", operands);
1511 goto shiftloop;
1513 else
1515 int n;
1517 /* If the count is negative, make it 0. */
1518 n = INTVAL (operands[2]);
1519 if (n < 0)
1520 n = 0;
1521 /* If the count is too big, truncate it.
1522 ANSI says shifts of GET_MODE_BITSIZE are undefined - we choose to
1523 do the intuitive thing. */
1524 else if (n > GET_MODE_BITSIZE (mode))
1525 n = GET_MODE_BITSIZE (mode);
1527 /* First see if we can do them inline. */
1528 if (n <= 8)
1530 while (--n >= 0)
1531 output_asm_insn (shift_one, operands);
1533 /* See if we can use a rotate/and. */
1534 else if (n == BITS_PER_WORD - 1)
1536 switch (code)
1538 case ASHIFT :
1539 output_asm_insn ("and %0,%0,1\n\tror %0,%0", operands);
1540 break;
1541 case ASHIFTRT :
1542 /* The ARC doesn't have a rol insn. Use something else. */
1543 output_asm_insn ("asl.f 0,%0\n\tsbc %0,0,0", operands);
1544 break;
1545 case LSHIFTRT :
1546 /* The ARC doesn't have a rol insn. Use something else. */
1547 output_asm_insn ("asl.f 0,%0\n\tadc %0,0,0", operands);
1548 break;
1549 default:
1550 break;
1553 /* Must loop. */
1554 else
1556 char buf[100];
1558 if (optimize)
1559 output_asm_insn ("mov lp_count,%c2", operands);
1560 else
1561 output_asm_insn ("mov %4,%c2", operands);
1562 shiftloop:
1563 if (optimize)
1565 if (flag_pic)
1566 sprintf (buf, "lr %%4,[status]\n\tadd %%4,%%4,6\t%s single insn loop start",
1567 ASM_COMMENT_START);
1568 else
1569 sprintf (buf, "mov %%4,%%%%st(1f)\t%s (single insn loop start) >> 2",
1570 ASM_COMMENT_START);
1571 output_asm_insn (buf, operands);
1572 output_asm_insn ("sr %4,[lp_start]", operands);
1573 output_asm_insn ("add %4,%4,1", operands);
1574 output_asm_insn ("sr %4,[lp_end]", operands);
1575 output_asm_insn ("nop\n\tnop", operands);
1576 if (flag_pic)
1577 fprintf (asm_out_file, "\t%s single insn loop\n",
1578 ASM_COMMENT_START);
1579 else
1580 fprintf (asm_out_file, "1:\t%s single insn loop\n",
1581 ASM_COMMENT_START);
1582 output_asm_insn (shift_one, operands);
1583 fprintf (asm_out_file, "2:\t%s end single insn loop\n",
1584 ASM_COMMENT_START);
1586 else
1588 fprintf (asm_out_file, "1:\t%s begin shift loop\n",
1589 ASM_COMMENT_START);
1590 output_asm_insn ("sub.f %4,%4,1", operands);
1591 output_asm_insn ("nop", operands);
1592 output_asm_insn ("bn.nd 2f", operands);
1593 output_asm_insn (shift_one, operands);
1594 output_asm_insn ("b.nd 1b", operands);
1595 fprintf (asm_out_file, "2:\t%s end shift loop\n",
1596 ASM_COMMENT_START);
1601 return "";
1604 /* Nested function support. */
1606 /* Emit RTL insns to initialize the variable parts of a trampoline.
1607 FNADDR is an RTX for the address of the function's pure code.
1608 CXT is an RTX for the static chain value for the function. */
1610 void
1611 arc_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
1612 rtx fnaddr ATTRIBUTE_UNUSED,
1613 rtx cxt ATTRIBUTE_UNUSED)
1617 /* Set the cpu type and print out other fancy things,
1618 at the top of the file. */
1620 static void
1621 arc_file_start (void)
1623 default_file_start ();
1624 fprintf (asm_out_file, "\t.cpu %s\n", arc_cpu_string);
1627 /* Print operand X (an rtx) in assembler syntax to file FILE.
1628 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
1629 For `%' followed by punctuation, CODE is the punctuation and X is null. */
1631 void
1632 arc_print_operand (FILE *file, rtx x, int code)
1634 switch (code)
1636 case '#' :
1637 /* Conditional branches. For now these are equivalent. */
1638 case '*' :
1639 /* Unconditional branches. Output the appropriate delay slot suffix. */
1640 if (!final_sequence || XVECLEN (final_sequence, 0) == 1)
1642 /* There's nothing in the delay slot. */
1643 fputs (".nd", file);
1645 else
1647 rtx jump = XVECEXP (final_sequence, 0, 0);
1648 rtx delay = XVECEXP (final_sequence, 0, 1);
1649 if (INSN_ANNULLED_BRANCH_P (jump))
1650 fputs (INSN_FROM_TARGET_P (delay) ? ".jd" : ".nd", file);
1651 else
1652 fputs (".d", file);
1654 return;
1655 case '?' : /* with leading "." */
1656 case '!' : /* without leading "." */
1657 /* This insn can be conditionally executed. See if the ccfsm machinery
1658 says it should be conditionalized. */
1659 if (arc_ccfsm_state == 3 || arc_ccfsm_state == 4)
1661 /* Is this insn in a delay slot? */
1662 if (final_sequence && XVECLEN (final_sequence, 0) == 2)
1664 rtx insn = XVECEXP (final_sequence, 0, 1);
1666 /* If the insn is annulled and is from the target path, we need
1667 to inverse the condition test. */
1668 if (INSN_ANNULLED_BRANCH_P (insn))
1670 if (INSN_FROM_TARGET_P (insn))
1671 fprintf (file, "%s%s",
1672 code == '?' ? "." : "",
1673 arc_condition_codes[ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc)]);
1674 else
1675 fprintf (file, "%s%s",
1676 code == '?' ? "." : "",
1677 arc_condition_codes[arc_ccfsm_current_cc]);
1679 else
1681 /* This insn is executed for either path, so don't
1682 conditionalize it at all. */
1683 ; /* nothing to do */
1686 else
1688 /* This insn isn't in a delay slot. */
1689 fprintf (file, "%s%s",
1690 code == '?' ? "." : "",
1691 arc_condition_codes[arc_ccfsm_current_cc]);
1694 return;
1695 case '~' :
1696 /* Output a nop if we're between a set of the condition codes,
1697 and a conditional branch. */
1698 if (last_insn_set_cc_p)
1699 fputs ("nop\n\t", file);
1700 return;
1701 case 'd' :
1702 fputs (arc_condition_codes[get_arc_condition_code (x)], file);
1703 return;
1704 case 'D' :
1705 fputs (arc_condition_codes[ARC_INVERSE_CONDITION_CODE
1706 (get_arc_condition_code (x))],
1707 file);
1708 return;
1709 case 'R' :
1710 /* Write second word of DImode or DFmode reference,
1711 register or memory. */
1712 if (GET_CODE (x) == REG)
1713 fputs (reg_names[REGNO (x)+1], file);
1714 else if (GET_CODE (x) == MEM)
1716 fputc ('[', file);
1717 /* Handle possible auto-increment. Since it is pre-increment and
1718 we have already done it, we can just use an offset of four. */
1719 /* ??? This is taken from rs6000.c I think. I don't think it is
1720 currently necessary, but keep it around. */
1721 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1722 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1723 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
1724 else
1725 output_address (plus_constant (XEXP (x, 0), 4));
1726 fputc (']', file);
1728 else
1729 output_operand_lossage ("invalid operand to %%R code");
1730 return;
1731 case 'S' :
1732 if ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
1733 || GET_CODE (x) == LABEL_REF)
1735 fprintf (file, "%%st(");
1736 output_addr_const (file, x);
1737 fprintf (file, ")");
1738 return;
1740 break;
1741 case 'H' :
1742 case 'L' :
1743 if (GET_CODE (x) == REG)
1745 /* L = least significant word, H = most significant word */
1746 if ((TARGET_BIG_ENDIAN != 0) ^ (code == 'L'))
1747 fputs (reg_names[REGNO (x)], file);
1748 else
1749 fputs (reg_names[REGNO (x)+1], file);
1751 else if (GET_CODE (x) == CONST_INT
1752 || GET_CODE (x) == CONST_DOUBLE)
1754 rtx first, second;
1756 split_double (x, &first, &second);
1757 fprintf (file, "0x%08lx",
1758 (long)(code == 'L' ? INTVAL (first) : INTVAL (second)));
1760 else
1761 output_operand_lossage ("invalid operand to %%H/%%L code");
1762 return;
1763 case 'A' :
1765 char str[30];
1767 gcc_assert (GET_CODE (x) == CONST_DOUBLE
1768 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT);
1770 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
1771 fprintf (file, "%s", str);
1772 return;
1774 case 'U' :
1775 /* Output a load/store with update indicator if appropriate. */
1776 if (GET_CODE (x) == MEM)
1778 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1779 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1780 fputs (".a", file);
1782 else
1783 output_operand_lossage ("invalid operand to %%U code");
1784 return;
1785 case 'V' :
1786 /* Output cache bypass indicator for a load/store insn. Volatile memory
1787 refs are defined to use the cache bypass mechanism. */
1788 if (GET_CODE (x) == MEM)
1790 if (MEM_VOLATILE_P (x))
1791 fputs (".di", file);
1793 else
1794 output_operand_lossage ("invalid operand to %%V code");
1795 return;
1796 case 0 :
1797 /* Do nothing special. */
1798 break;
1799 default :
1800 /* Unknown flag. */
1801 output_operand_lossage ("invalid operand output code");
1804 switch (GET_CODE (x))
1806 case REG :
1807 fputs (reg_names[REGNO (x)], file);
1808 break;
1809 case MEM :
1810 fputc ('[', file);
1811 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
1812 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1813 GET_MODE_SIZE (GET_MODE (x))));
1814 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
1815 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1816 - GET_MODE_SIZE (GET_MODE (x))));
1817 else
1818 output_address (XEXP (x, 0));
1819 fputc (']', file);
1820 break;
1821 case CONST_DOUBLE :
1822 /* We handle SFmode constants here as output_addr_const doesn't. */
1823 if (GET_MODE (x) == SFmode)
1825 REAL_VALUE_TYPE d;
1826 long l;
1828 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1829 REAL_VALUE_TO_TARGET_SINGLE (d, l);
1830 fprintf (file, "0x%08lx", l);
1831 break;
1833 /* Fall through. Let output_addr_const deal with it. */
1834 default :
1835 output_addr_const (file, x);
1836 break;
1840 /* Print a memory address as an operand to reference that memory location. */
1842 void
1843 arc_print_operand_address (FILE *file, rtx addr)
1845 register rtx base, index = 0;
1846 int offset = 0;
1848 switch (GET_CODE (addr))
1850 case REG :
1851 fputs (reg_names[REGNO (addr)], file);
1852 break;
1853 case SYMBOL_REF :
1854 if (/*???*/ 0 && SYMBOL_REF_FUNCTION_P (addr))
1856 fprintf (file, "%%st(");
1857 output_addr_const (file, addr);
1858 fprintf (file, ")");
1860 else
1861 output_addr_const (file, addr);
1862 break;
1863 case PLUS :
1864 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
1865 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
1866 else if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
1867 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
1868 else
1869 base = XEXP (addr, 0), index = XEXP (addr, 1);
1870 gcc_assert (GET_CODE (base) == REG);
1871 fputs (reg_names[REGNO (base)], file);
1872 if (index == 0)
1874 if (offset != 0)
1875 fprintf (file, ",%d", offset);
1877 else
1879 switch (GET_CODE (index))
1881 case REG:
1882 fprintf (file, ",%s", reg_names[REGNO (index)]);
1883 break;
1884 case SYMBOL_REF:
1885 fputc (',', file), output_addr_const (file, index);
1886 break;
1887 default:
1888 gcc_unreachable ();
1891 break;
1892 case PRE_INC :
1893 case PRE_DEC :
1894 /* We shouldn't get here as we've lost the mode of the memory object
1895 (which says how much to inc/dec by. */
1896 gcc_unreachable ();
1897 break;
1898 default :
1899 output_addr_const (file, addr);
1900 break;
1904 /* Update compare/branch separation marker. */
1906 static void
1907 record_cc_ref (rtx insn)
1909 last_insn_set_cc_p = current_insn_set_cc_p;
1911 switch (get_attr_cond (insn))
1913 case COND_SET :
1914 case COND_SET_ZN :
1915 case COND_SET_ZNC :
1916 if (get_attr_length (insn) == 1)
1917 current_insn_set_cc_p = 1;
1918 else
1919 current_insn_set_cc_p = 0;
1920 break;
1921 default :
1922 current_insn_set_cc_p = 0;
1923 break;
1924 }
1925 }
1927 /* Conditional execution support.
1929 This is based on the ARM port but for now is much simpler.
1931 A finite state machine takes care of noticing whether or not instructions
1932 can be conditionally executed, and thus decrease execution time and code
1933 size by deleting branch instructions. The fsm is controlled by
1934 final_prescan_insn, and controls the actions of PRINT_OPERAND. The patterns
1935 in the .md file for the branch insns also have a hand in this. */
1937 /* The states of the fsm controlling condition codes are:
1938 0: normal, do nothing special
1939 1: don't output this insn
1940 2: don't output this insn
1941 3: make insns conditional
1942 4: make insns conditional
1944 State transitions (state->state by whom, under what condition):
1945 0 -> 1 final_prescan_insn, if insn is conditional branch
1946 0 -> 2 final_prescan_insn, if the `target' is an unconditional branch
1947 1 -> 3 branch patterns, after having not output the conditional branch
1948 2 -> 4 branch patterns, after having not output the conditional branch
1949 3 -> 0 (*targetm.asm_out.internal_label), if the `target' label is reached
1950 (the target label has CODE_LABEL_NUMBER equal to
1951 arc_ccfsm_target_label).
1952 4 -> 0 final_prescan_insn, if `target' unconditional branch is reached
1954 If the jump clobbers the conditions then we use states 2 and 4.
1956 A similar thing can be done with conditional return insns.
1958 We also handle separating branches from sets of the condition code.
1959 This is done here because knowledge of the ccfsm state is required, and
1960 we may not be outputting the branch.  */
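/* Illustrative sketch, with assumed assembler mnemonics rather than this
   port's exact output: for a fragment like

        beq .L1        ; conditional branch around a single insn
        mov r2,1
      .L1:

   the branch is suppressed (states 1 and 2) and the insn it would have
   skipped is printed with the fall-through condition appended, roughly

        mov.ne r2,1
      .L1:

   (states 3 and 4).  */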
1962 void
1963 arc_final_prescan_insn (rtx insn,
1964 rtx *opvec ATTRIBUTE_UNUSED,
1965 int noperands ATTRIBUTE_UNUSED)
1966 {
1967 /* BODY will hold the body of INSN. */
1968 register rtx body = PATTERN (insn);
1970 /* This will be 1 if trying to repeat the trick (i.e.: do the `else' part of
1971 an if/then/else), and things need to be reversed. */
1972 int reverse = 0;
1974 /* If we start with a return insn, we only succeed if we find another one. */
1975 int seeking_return = 0;
1977 /* START_INSN will hold the insn from where we start looking. This is the
1978 first insn after the following code_label if REVERSE is true. */
1979 rtx start_insn = insn;
1981 /* Update compare/branch separation marker. */
1982 record_cc_ref (insn);
1984 /* Allow -mdebug-ccfsm to turn this off so we can see how well it does.
1985 We can't do this in macro FINAL_PRESCAN_INSN because it's called from
1986 final_scan_insn which has `optimize' as a local. */
1987 if (optimize < 2 || TARGET_NO_COND_EXEC)
1988 return;
1990 /* If in state 4, check if the target branch is reached, in order to
1991 change back to state 0. */
1992 if (arc_ccfsm_state == 4)
1993 {
1994 if (insn == arc_ccfsm_target_insn)
1995 {
1996 arc_ccfsm_target_insn = NULL;
1997 arc_ccfsm_state = 0;
1998 }
1999 return;
2000 }
2002 /* If in state 3, it is possible to repeat the trick, if this insn is an
2003 unconditional branch to a label, and immediately following this branch
2004 is the previous target label which is only used once, and the label this
2005 branch jumps to is not too far off. Or in other words "we've done the
2006 `then' part, see if we can do the `else' part." */
2007 if (arc_ccfsm_state == 3)
2008 {
2009 if (simplejump_p (insn))
2010 {
2011 start_insn = next_nonnote_insn (start_insn);
2012 if (GET_CODE (start_insn) == BARRIER)
2013 {
2014 /* ??? Isn't this always a barrier?  */
2015 start_insn = next_nonnote_insn (start_insn);
2016 }
2017 if (GET_CODE (start_insn) == CODE_LABEL
2018 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2019 && LABEL_NUSES (start_insn) == 1)
2020 reverse = TRUE;
2021 else
2022 return;
2023 }
2024 else if (GET_CODE (body) == RETURN)
2025 {
2026 start_insn = next_nonnote_insn (start_insn);
2027 if (GET_CODE (start_insn) == BARRIER)
2028 start_insn = next_nonnote_insn (start_insn);
2029 if (GET_CODE (start_insn) == CODE_LABEL
2030 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2031 && LABEL_NUSES (start_insn) == 1)
2032 {
2033 reverse = TRUE;
2034 seeking_return = 1;
2035 }
2036 else
2037 return;
2038 }
2039 else
2040 return;
2041 }
2043 if (GET_CODE (insn) != JUMP_INSN)
2044 return;
2046 /* This jump might be paralleled with a clobber of the condition codes;
2047 the jump should always come first.  */
2048 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
2049 body = XVECEXP (body, 0, 0);
2051 if (reverse
2052 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
2053 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
2054 {
2055 int insns_skipped = 0, fail = FALSE, succeed = FALSE;
2056 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF.  */
2057 int then_not_else = TRUE;
2058 /* Nonzero if next insn must be the target label.  */
2059 int next_must_be_target_label_p;
2060 rtx this_insn = start_insn, label = 0;
2062 /* Register the insn jumped to.  */
2063 if (reverse)
2064 {
2065 if (!seeking_return)
2066 label = XEXP (SET_SRC (body), 0);
2067 }
2068 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
2069 label = XEXP (XEXP (SET_SRC (body), 1), 0);
2070 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
2071 {
2072 label = XEXP (XEXP (SET_SRC (body), 2), 0);
2073 then_not_else = FALSE;
2074 }
2075 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
2076 seeking_return = 1;
2077 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
2078 {
2079 seeking_return = 1;
2080 then_not_else = FALSE;
2081 }
2082 else
2083 gcc_unreachable ();
2085 /* See how many insns this branch skips, and what kind of insns. If all
2086 insns are okay, and the label or unconditional branch to the same
2087 label is not too far away, succeed. */
2088 for (insns_skipped = 0, next_must_be_target_label_p = FALSE;
2089 !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
2090 insns_skipped++)
2091 {
2092 rtx scanbody;
2094 this_insn = next_nonnote_insn (this_insn);
2095 if (!this_insn)
2096 break;
2098 if (next_must_be_target_label_p)
2099 {
2100 if (GET_CODE (this_insn) == BARRIER)
2101 continue;
2102 if (GET_CODE (this_insn) == CODE_LABEL
2103 && this_insn == label)
2104 {
2105 arc_ccfsm_state = 1;
2106 succeed = TRUE;
2107 }
2108 else
2109 fail = TRUE;
2110 break;
2111 }
2113 scanbody = PATTERN (this_insn);
2115 switch (GET_CODE (this_insn))
2116 {
2117 case CODE_LABEL:
2118 /* Succeed if it is the target label, otherwise fail since
2119 control falls in from somewhere else.  */
2120 if (this_insn == label)
2121 {
2122 arc_ccfsm_state = 1;
2123 succeed = TRUE;
2124 }
2125 else
2126 fail = TRUE;
2127 break;
2129 case BARRIER:
2130 /* Succeed if the following insn is the target label.
2131 Otherwise fail.
2132 If return insns are used then the last insn in a function
2133 will be a barrier. */
2134 next_must_be_target_label_p = TRUE;
2135 break;
2137 case CALL_INSN:
2138 /* Can handle a call insn if there are no insns after it.
2139 IE: The next "insn" is the target label. We don't have to
2140 worry about delay slots as such insns are SEQUENCE's inside
2141 INSN's. ??? It is possible to handle such insns though. */
2142 if (get_attr_cond (this_insn) == COND_CANUSE)
2143 next_must_be_target_label_p = TRUE;
2144 else
2145 fail = TRUE;
2146 break;
2148 case JUMP_INSN:
2149 /* If this is an unconditional branch to the same label, succeed.
2150 If it is to another label, do nothing. If it is conditional,
2151 fail. */
2152 /* ??? Probably, the tests for the SET and the PC are unnecessary.  */
2154 if (GET_CODE (scanbody) == SET
2155 && GET_CODE (SET_DEST (scanbody)) == PC)
2156 {
2157 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
2158 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
2159 {
2160 arc_ccfsm_state = 2;
2161 succeed = TRUE;
2162 }
2163 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
2164 fail = TRUE;
2165 }
2166 else if (GET_CODE (scanbody) == RETURN
2167 && seeking_return)
2168 {
2169 arc_ccfsm_state = 2;
2170 succeed = TRUE;
2171 }
2172 else if (GET_CODE (scanbody) == PARALLEL)
2173 {
2174 if (get_attr_cond (this_insn) != COND_CANUSE)
2175 fail = TRUE;
2176 }
2177 break;
2179 case INSN:
2180 /* We can only do this with insns that can use the condition
2181 codes (and don't set them). */
2182 if (GET_CODE (scanbody) == SET
2183 || GET_CODE (scanbody) == PARALLEL)
2184 {
2185 if (get_attr_cond (this_insn) != COND_CANUSE)
2186 fail = TRUE;
2187 }
2188 /* We can't handle other insns like sequences.  */
2189 else
2190 fail = TRUE;
2191 break;
2193 default:
2194 break;
2195 }
2196 }
2198 if (succeed)
2199 {
2200 if ((!seeking_return) && (arc_ccfsm_state == 1 || reverse))
2201 arc_ccfsm_target_label = CODE_LABEL_NUMBER (label);
2202 else
2203 {
2204 gcc_assert (seeking_return || arc_ccfsm_state == 2);
2205 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
2206 {
2207 this_insn = next_nonnote_insn (this_insn);
2208 gcc_assert (!this_insn
2209 || (GET_CODE (this_insn) != BARRIER
2210 && GET_CODE (this_insn) != CODE_LABEL));
2211 }
2212 if (!this_insn)
2213 {
2214 /* Oh dear!  We ran off the end; give up.  */
2215 extract_insn_cached (insn);
2216 arc_ccfsm_state = 0;
2217 arc_ccfsm_target_insn = NULL;
2218 return;
2219 }
2220 arc_ccfsm_target_insn = this_insn;
2221 }
2223 /* If REVERSE is true, arc_ccfsm_current_cc needs to be inverted from
2224 what it was.  */
2225 if (!reverse)
2226 arc_ccfsm_current_cc = get_arc_condition_code (XEXP (SET_SRC (body),
2227 0));
2229 if (reverse || then_not_else)
2230 arc_ccfsm_current_cc = ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc);
2231 }
2233 /* Restore recog_data. Getting the attributes of other insns can
2234 destroy this array, but final.c assumes that it remains intact
2235 across this call. */
2236 extract_insn_cached (insn);
2237 }
2240 /* Record that we are currently outputting label NUM with prefix PREFIX.
2241 If it's the label we're looking for, reset the ccfsm machinery.
2243 Called from (*targetm.asm_out.internal_label).  */
2245 void
2246 arc_ccfsm_at_label (const char *prefix, int num)
2247 {
2248 if (arc_ccfsm_state == 3 && arc_ccfsm_target_label == num
2249 && !strcmp (prefix, "L"))
2250 {
2251 arc_ccfsm_state = 0;
2252 arc_ccfsm_target_insn = NULL_RTX;
2253 }
2254 }
2256 /* See if the current insn, which is a conditional branch, is to be
2257 deleted. */
2259 int
2260 arc_ccfsm_branch_deleted_p (void)
2261 {
2262 if (arc_ccfsm_state == 1 || arc_ccfsm_state == 2)
2263 return 1;
2264 return 0;
2265 }
2267 /* Record that a branch isn't being output because subsequent insns can be
2268 conditionalized.  */
2270 void
2271 arc_ccfsm_record_branch_deleted (void)
2272 {
2273 /* Indicate we're conditionalizing insns now. */
2274 arc_ccfsm_state += 2;
2276 /* If the next insn is a subroutine call, we still need a nop between the
2277 cc setter and user. We need to undo the effect of calling record_cc_ref
2278 for the just deleted branch. */
2279 current_insn_set_cc_p = last_insn_set_cc_p;
2280 }
2282 static void
2283 arc_va_start (tree valist, rtx nextarg)
2284 {
2285 /* See arc_setup_incoming_varargs for reasons for this oddity. */
2286 if (crtl->args.info < 8
2287 && (crtl->args.info & 1))
2288 nextarg = plus_constant (nextarg, UNITS_PER_WORD);
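/* Mechanical example of the test above (assuming crtl->args.info counts the
   argument words consumed by the named parameters): if the named arguments
   used an odd number of them, say three, NEXTARG is advanced by one word
   here so that it stays consistent with what arc_setup_incoming_varargs
   does; the reasoning lives with that function, as noted above.  */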
2290 std_expand_builtin_va_start (valist, nextarg);
2291 }
2293 /* This is how to output a definition of an internal numbered label where
2294 PREFIX is the class of label and NUM is the number within the class. */
2296 static void
2297 arc_internal_label (FILE *stream, const char *prefix, unsigned long labelno)
2298 {
2299 arc_ccfsm_at_label (prefix, labelno);
2300 default_internal_label (stream, prefix, labelno);
2301 }
2303 /* Worker function for TARGET_ASM_EXTERNAL_LIBCALL. */
2305 static void
2306 arc_external_libcall (rtx fun ATTRIBUTE_UNUSED)
2307 {
2308 #if 0
2309 /* On the ARC we want to have libgcc's for multiple cpus in one binary.
2310 We can't use `assemble_name' here as that will call ASM_OUTPUT_LABELREF
2311 and we'll get another suffix added on if -mmangle-cpu. */
2312 if (TARGET_MANGLE_CPU_LIBGCC)
2313 {
2314 fprintf (FILE, "\t.rename\t_%s, _%s%s\n",
2315 XSTR (SYMREF, 0), XSTR (SYMREF, 0),
2316 arc_mangle_suffix);
2317 }
2318 #endif
2319 }
2321 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2323 static bool
2324 arc_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2325 {
2326 if (AGGREGATE_TYPE_P (type))
2327 return true;
2328 else
2329 {
2330 HOST_WIDE_INT size = int_size_in_bytes (type);
2331 return (size == -1 || size > 8);
2332 }
2333 }
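/* For example, any struct or union return value goes in memory, while a
   non-aggregate value of at most 8 bytes (e.g. a long long or a double)
   does not (illustrative reading of the checks above).  */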
2335 /* For ARC, all aggregates and arguments greater than 8 bytes are
2336 passed by reference. */
2338 static bool
2339 arc_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
2340 enum machine_mode mode, const_tree type,
2341 bool named ATTRIBUTE_UNUSED)
2342 {
2343 unsigned HOST_WIDE_INT size;
2345 if (type)
2346 {
2347 if (AGGREGATE_TYPE_P (type))
2348 return true;
2349 size = int_size_in_bytes (type);
2350 }
2351 else
2352 size = GET_MODE_SIZE (mode);
2354 return size > 8;
2355 }
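/* For example, a 4-byte struct is still passed by reference (it is an
   aggregate), a 16-byte scalar is passed by reference because of its size,
   and a plain double (8 bytes) is passed by value (illustrative reading of
   the checks above).  */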
2357 /* Trampolines. */
2358 /* ??? This doesn't work yet because GCC will use as the address of a nested
2359 function the address of the trampoline. We need to use that address
2360 right shifted by 2. It looks like we'll need PSImode after all. :-(
2362 ??? The above comment sounds like it's doable via
2363 TARGET_TRAMPOLINE_ADJUST_ADDRESS; no PSImode needed.
2365 On the ARC, the trampoline is quite simple as we have 32-bit immediate
2366 constants.
2368 mov r24,STATIC
2369 j.nd FUNCTION
2370 */
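/* A plausible reading of the init code below (the exact opcode encodings are
   taken on trust from the constants used): word 0 holds the opcode of the
   "mov r24,limm" insn, word 1 its 32-bit long immediate (the static chain
   value), word 2 the opcode of the "j.nd limm" insn, and word 3 its long
   immediate (the address of the nested function).  */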
2372 static void
2373 arc_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2374 {
2375 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2376 rtx mem;
2378 mem = adjust_address (m_tramp, SImode, 0);
2379 emit_move_insn (mem, GEN_INT (0x631f7c00));
2381 mem = adjust_address (m_tramp, SImode, 4);
2382 emit_move_insn (mem, chain_value);
2384 mem = adjust_address (m_tramp, SImode, 8);
2385 emit_move_insn (mem, GEN_INT (0x381f0000));
2387 mem = adjust_address (m_tramp, SImode, 12);
2388 emit_move_insn (mem, fnaddr);
2390 emit_insn (gen_flush_icache (m_tramp));
2391 }