1 /* Subroutines used for code generation on the Argonaut ARC cpu.
2 Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
23 /* ??? This is an old port, and is undoubtedly suffering from bit rot. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "tree.h"
30 #include "rtl.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "real.h"
34 #include "insn-config.h"
35 #include "conditions.h"
36 #include "output.h"
37 #include "insn-attr.h"
38 #include "flags.h"
39 #include "function.h"
40 #include "expr.h"
41 #include "recog.h"
42 #include "toplev.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
47 /* Which cpu we're compiling for. */
48 int arc_cpu_type;
50 /* Name of mangle string to add to symbols to separate code compiled for each
51 cpu (or NULL). */
52 const char *arc_mangle_cpu;
54 /* Save the operands last given to a compare for use when we
55 generate a scc or bcc insn. */
56 rtx arc_compare_op0, arc_compare_op1;
58 /* Name of text, data, and rodata sections used in varasm.c. */
59 const char *arc_text_section;
60 const char *arc_data_section;
61 const char *arc_rodata_section;
63 /* Array of valid operand punctuation characters. */
64 char arc_punct_chars[256];
66 /* Variables used by arc_final_prescan_insn to implement conditional
67 execution. */
68 static int arc_ccfsm_state;
69 static int arc_ccfsm_current_cc;
70 static rtx arc_ccfsm_target_insn;
71 static int arc_ccfsm_target_label;
73 /* The maximum number of insns skipped which will be conditionalised if
74 possible. */
75 #define MAX_INSNS_SKIPPED 3
77 /* A nop is needed between a 4 byte insn that sets the condition codes and
78 a branch that uses them (the same isn't true for an 8 byte insn that sets
79 the condition codes). Set by arc_final_prescan_insn. Used by
80 arc_print_operand. */
81 static int last_insn_set_cc_p;
82 static int current_insn_set_cc_p;
83 static bool arc_handle_option (size_t, const char *, int);
84 static void record_cc_ref (rtx);
85 static void arc_init_reg_tables (void);
86 static int get_arc_condition_code (rtx);
87 const struct attribute_spec arc_attribute_table[];
88 static tree arc_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
89 static bool arc_assemble_integer (rtx, unsigned int, int);
90 static void arc_output_function_prologue (FILE *, HOST_WIDE_INT);
91 static void arc_output_function_epilogue (FILE *, HOST_WIDE_INT);
92 static void arc_file_start (void);
93 static void arc_internal_label (FILE *, const char *, unsigned long);
94 static void arc_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
95 tree, int *, int);
96 static bool arc_rtx_costs (rtx, int, int, int *);
97 static int arc_address_cost (rtx);
98 static void arc_external_libcall (rtx);
99 static bool arc_return_in_memory (tree, tree);
100 static bool arc_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
101 tree, bool);
103 /* Initialize the GCC target structure. */
104 #undef TARGET_ASM_ALIGNED_HI_OP
105 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
106 #undef TARGET_ASM_ALIGNED_SI_OP
107 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
108 #undef TARGET_ASM_INTEGER
109 #define TARGET_ASM_INTEGER arc_assemble_integer
111 #undef TARGET_ASM_FUNCTION_PROLOGUE
112 #define TARGET_ASM_FUNCTION_PROLOGUE arc_output_function_prologue
113 #undef TARGET_ASM_FUNCTION_EPILOGUE
114 #define TARGET_ASM_FUNCTION_EPILOGUE arc_output_function_epilogue
115 #undef TARGET_ASM_FILE_START
116 #define TARGET_ASM_FILE_START arc_file_start
117 #undef TARGET_ATTRIBUTE_TABLE
118 #define TARGET_ATTRIBUTE_TABLE arc_attribute_table
119 #undef TARGET_ASM_INTERNAL_LABEL
120 #define TARGET_ASM_INTERNAL_LABEL arc_internal_label
121 #undef TARGET_ASM_EXTERNAL_LIBCALL
122 #define TARGET_ASM_EXTERNAL_LIBCALL arc_external_libcall
124 #undef TARGET_HANDLE_OPTION
125 #define TARGET_HANDLE_OPTION arc_handle_option
127 #undef TARGET_RTX_COSTS
128 #define TARGET_RTX_COSTS arc_rtx_costs
129 #undef TARGET_ADDRESS_COST
130 #define TARGET_ADDRESS_COST arc_address_cost
132 #undef TARGET_PROMOTE_FUNCTION_ARGS
133 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
134 #undef TARGET_PROMOTE_FUNCTION_RETURN
135 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
136 #undef TARGET_PROMOTE_PROTOTYPES
137 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
139 #undef TARGET_RETURN_IN_MEMORY
140 #define TARGET_RETURN_IN_MEMORY arc_return_in_memory
141 #undef TARGET_PASS_BY_REFERENCE
142 #define TARGET_PASS_BY_REFERENCE arc_pass_by_reference
143 #undef TARGET_CALLEE_COPIES
144 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
146 #undef TARGET_SETUP_INCOMING_VARARGS
147 #define TARGET_SETUP_INCOMING_VARARGS arc_setup_incoming_varargs
149 struct gcc_target targetm = TARGET_INITIALIZER;
151 /* Implement TARGET_HANDLE_OPTION. */
153 static bool
154 arc_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
156 switch (code)
158 case OPT_mcpu_:
159 return strcmp (arg, "base") == 0 || ARC_EXTENSION_CPU (arg);
161 default:
162 return true;
166 /* Called by OVERRIDE_OPTIONS to initialize various things. */
168 void
169 arc_init (void)
171 char *tmp;
173 /* Set the pseudo-ops for the various standard sections. */
174 arc_text_section = tmp = xmalloc (strlen (arc_text_string) + sizeof (ARC_SECTION_FORMAT) + 1);
175 sprintf (tmp, ARC_SECTION_FORMAT, arc_text_string);
176 arc_data_section = tmp = xmalloc (strlen (arc_data_string) + sizeof (ARC_SECTION_FORMAT) + 1);
177 sprintf (tmp, ARC_SECTION_FORMAT, arc_data_string);
178 arc_rodata_section = tmp = xmalloc (strlen (arc_rodata_string) + sizeof (ARC_SECTION_FORMAT) + 1);
179 sprintf (tmp, ARC_SECTION_FORMAT, arc_rodata_string);
181 arc_init_reg_tables ();
183 /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P. */
184 memset (arc_punct_chars, 0, sizeof (arc_punct_chars));
185 arc_punct_chars['#'] = 1;
186 arc_punct_chars['*'] = 1;
187 arc_punct_chars['?'] = 1;
188 arc_punct_chars['!'] = 1;
189 arc_punct_chars['~'] = 1;
192 /* The condition codes of the ARC, and the inverse function. */
193 static const char *const arc_condition_codes[] =
195 "al", 0, "eq", "ne", "p", "n", "c", "nc", "v", "nv",
196 "gt", "le", "ge", "lt", "hi", "ls", "pnz", 0
199 #define ARC_INVERSE_CONDITION_CODE(X) ((X) ^ 1)
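/* Since the table above is laid out in complementary pairs, flipping the
   low bit of an index yields the inverse condition.  A worked example,
   using the values returned by get_arc_condition_code below:
     EQ  -> index 2  ("eq"),  ARC_INVERSE_CONDITION_CODE (2)  -> 3  ("ne")
     GTU -> index 14 ("hi"),  ARC_INVERSE_CONDITION_CODE (14) -> 15 ("ls")  */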
201 /* Returns the index of the ARC condition code string in
202 `arc_condition_codes'. COMPARISON should be an rtx like
203 `(eq (...) (...))'. */
205 static int
206 get_arc_condition_code (rtx comparison)
208 switch (GET_CODE (comparison))
210 case EQ : return 2;
211 case NE : return 3;
212 case GT : return 10;
213 case LE : return 11;
214 case GE : return 12;
215 case LT : return 13;
216 case GTU : return 14;
217 case LEU : return 15;
218 case LTU : return 6;
219 case GEU : return 7;
220 default : gcc_unreachable ();
222 /*NOTREACHED*/
223 return (42);
226 /* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
227 return the mode to be used for the comparison. */
229 enum machine_mode
230 arc_select_cc_mode (enum rtx_code op,
 231                     rtx x,
232 rtx y ATTRIBUTE_UNUSED)
234 switch (op)
236 case EQ :
237 case NE :
238 return CCZNmode;
239 default :
240 switch (GET_CODE (x))
242 case AND :
243 case IOR :
244 case XOR :
245 case SIGN_EXTEND :
246 case ZERO_EXTEND :
247 return CCZNmode;
248 case ASHIFT :
249 case ASHIFTRT :
250 case LSHIFTRT :
251 return CCZNCmode;
252 default:
253 break;
256 return CCmode;
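/* For illustration: EQ/NE tests always get CCZNmode; for other comparisons
   the choice follows the first operand, so a logical operation such as
   (and (reg) (const_int 7)) needs only the Z/N bits (CCZNmode), a shift,
   which also produces a carry, gets CCZNCmode, and a plain signed compare
   of two registers falls back to the full CCmode.  */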
259 /* Vectors to keep interesting information about registers where it can easily
 260    be got.  We used to use the actual mode value as the bit number, but there
 261    are (or may be) more than 32 modes now.  Instead we use two tables: one
262 indexed by hard register number, and one indexed by mode. */
264 /* The purpose of arc_mode_class is to shrink the range of modes so that
265 they all fit (as bit numbers) in a 32 bit word (again). Each real mode is
266 mapped into one arc_mode_class mode. */
268 enum arc_mode_class {
269 C_MODE,
270 S_MODE, D_MODE, T_MODE, O_MODE,
271 SF_MODE, DF_MODE, TF_MODE, OF_MODE
274 /* Modes for condition codes. */
275 #define C_MODES (1 << (int) C_MODE)
277 /* Modes for single-word and smaller quantities. */
278 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
280 /* Modes for double-word and smaller quantities. */
 281 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << (int) DF_MODE))
283 /* Modes for quad-word and smaller quantities. */
284 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
286 /* Value is 1 if register/mode pair is acceptable on arc. */
288 const unsigned int arc_hard_regno_mode_ok[] = {
289 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
290 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
291 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, D_MODES,
292 D_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
294 /* ??? Leave these as S_MODES for now. */
295 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
296 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
297 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
298 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, C_MODES
301 unsigned int arc_mode_class [NUM_MACHINE_MODES];
303 enum reg_class arc_regno_reg_class[FIRST_PSEUDO_REGISTER];
305 static void
306 arc_init_reg_tables (void)
308 int i;
310 for (i = 0; i < NUM_MACHINE_MODES; i++)
312 switch (GET_MODE_CLASS (i))
314 case MODE_INT:
315 case MODE_PARTIAL_INT:
316 case MODE_COMPLEX_INT:
317 if (GET_MODE_SIZE (i) <= 4)
318 arc_mode_class[i] = 1 << (int) S_MODE;
319 else if (GET_MODE_SIZE (i) == 8)
320 arc_mode_class[i] = 1 << (int) D_MODE;
321 else if (GET_MODE_SIZE (i) == 16)
322 arc_mode_class[i] = 1 << (int) T_MODE;
323 else if (GET_MODE_SIZE (i) == 32)
324 arc_mode_class[i] = 1 << (int) O_MODE;
325 else
326 arc_mode_class[i] = 0;
327 break;
328 case MODE_FLOAT:
329 case MODE_COMPLEX_FLOAT:
330 if (GET_MODE_SIZE (i) <= 4)
331 arc_mode_class[i] = 1 << (int) SF_MODE;
332 else if (GET_MODE_SIZE (i) == 8)
333 arc_mode_class[i] = 1 << (int) DF_MODE;
334 else if (GET_MODE_SIZE (i) == 16)
335 arc_mode_class[i] = 1 << (int) TF_MODE;
336 else if (GET_MODE_SIZE (i) == 32)
337 arc_mode_class[i] = 1 << (int) OF_MODE;
338 else
339 arc_mode_class[i] = 0;
340 break;
341 case MODE_CC:
342 arc_mode_class[i] = 1 << (int) C_MODE;
343 break;
344 default:
345 arc_mode_class[i] = 0;
346 break;
350 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
352 if (i < 60)
353 arc_regno_reg_class[i] = GENERAL_REGS;
354 else if (i == 60)
355 arc_regno_reg_class[i] = LPCOUNT_REG;
356 else if (i == 61)
357 arc_regno_reg_class[i] = NO_REGS /* CC_REG: must be NO_REGS */;
358 else
359 arc_regno_reg_class[i] = NO_REGS;
363 /* ARC specific attribute support.
365 The ARC has these attributes:
366 interrupt - for interrupt functions
369 const struct attribute_spec arc_attribute_table[] =
371 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
372 { "interrupt", 1, 1, true, false, false, arc_handle_interrupt_attribute },
373 { NULL, 0, 0, false, false, false, NULL }
376 /* Handle an "interrupt" attribute; arguments as in
377 struct attribute_spec.handler. */
378 static tree
379 arc_handle_interrupt_attribute (tree *node ATTRIBUTE_UNUSED,
380 tree name,
381 tree args,
382 int flags ATTRIBUTE_UNUSED,
383 bool *no_add_attrs)
385 tree value = TREE_VALUE (args);
387 if (TREE_CODE (value) != STRING_CST)
389 warning (OPT_Wattributes,
390 "argument of %qs attribute is not a string constant",
391 IDENTIFIER_POINTER (name));
392 *no_add_attrs = true;
394 else if (strcmp (TREE_STRING_POINTER (value), "ilink1")
395 && strcmp (TREE_STRING_POINTER (value), "ilink2"))
397 warning (OPT_Wattributes,
398 "argument of %qs attribute is not \"ilink1\" or \"ilink2\"",
399 IDENTIFIER_POINTER (name));
400 *no_add_attrs = true;
403 return NULL_TREE;
407 /* Acceptable arguments to the call insn. */
410 call_address_operand (rtx op, enum machine_mode mode)
412 return (symbolic_operand (op, mode)
413 || (GET_CODE (op) == CONST_INT && LEGITIMATE_CONSTANT_P (op))
414 || (GET_CODE (op) == REG));
418 call_operand (rtx op, enum machine_mode mode)
420 if (GET_CODE (op) != MEM)
421 return 0;
422 op = XEXP (op, 0);
423 return call_address_operand (op, mode);
426 /* Returns 1 if OP is a symbol reference. */
429 symbolic_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
431 switch (GET_CODE (op))
433 case SYMBOL_REF:
434 case LABEL_REF:
435 case CONST :
436 return 1;
437 default:
438 return 0;
442 /* Return truth value of statement that OP is a symbolic memory
443 operand of mode MODE. */
446 symbolic_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
448 if (GET_CODE (op) == SUBREG)
449 op = SUBREG_REG (op);
450 if (GET_CODE (op) != MEM)
451 return 0;
452 op = XEXP (op, 0);
453 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST
454 || GET_CODE (op) == LABEL_REF);
457 /* Return true if OP is a short immediate (shimm) value. */
460 short_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
462 if (GET_CODE (op) != CONST_INT)
463 return 0;
464 return SMALL_INT (INTVAL (op));
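/* SMALL_INT itself lives in arc.h; assuming the usual ARC encoding of
   9 bit signed short immediates it accepts roughly -256..255, so for
   example (const_int 100) qualifies as a shimm while (const_int 4096)
   needs a limm instead (see long_immediate_operand below).  That range
   is an assumption here, not something this file defines.  */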
467 /* Return true if OP will require a long immediate (limm) value.
468 This is currently only used when calculating length attributes. */
471 long_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
473 switch (GET_CODE (op))
475 case SYMBOL_REF :
476 case LABEL_REF :
477 case CONST :
478 return 1;
479 case CONST_INT :
480 return !SMALL_INT (INTVAL (op));
481 case CONST_DOUBLE :
482 /* These can happen because large unsigned 32 bit constants are
483 represented this way (the multiplication patterns can cause these
484 to be generated). They also occur for SFmode values. */
485 return 1;
486 default:
487 break;
489 return 0;
492 /* Return true if OP is a MEM that when used as a load or store address will
493 require an 8 byte insn.
494 Load and store instructions don't allow the same possibilities but they're
495 similar enough that this one function will do.
496 This is currently only used when calculating length attributes. */
499 long_immediate_loadstore_operand (rtx op,
500 enum machine_mode mode ATTRIBUTE_UNUSED)
502 if (GET_CODE (op) != MEM)
503 return 0;
505 op = XEXP (op, 0);
506 switch (GET_CODE (op))
508 case SYMBOL_REF :
509 case LABEL_REF :
510 case CONST :
511 return 1;
512 case CONST_INT :
513 /* This must be handled as "st c,[limm]". Ditto for load.
514 Technically, the assembler could translate some possibilities to
515 "st c,[limm/2 + limm/2]" if limm/2 will fit in a shimm, but we don't
516 assume that it does. */
517 return 1;
518 case CONST_DOUBLE :
519 /* These can happen because large unsigned 32 bit constants are
520 represented this way (the multiplication patterns can cause these
521 to be generated). They also occur for SFmode values. */
522 return 1;
523 case REG :
524 return 0;
525 case PLUS :
526 if (GET_CODE (XEXP (op, 1)) == CONST_INT
527 && !SMALL_INT (INTVAL (XEXP (op, 1))))
528 return 1;
529 return 0;
530 default:
531 break;
533 return 0;
536 /* Return true if OP is an acceptable argument for a single word
537 move source. */
540 move_src_operand (rtx op, enum machine_mode mode)
542 switch (GET_CODE (op))
544 case SYMBOL_REF :
545 case LABEL_REF :
546 case CONST :
547 return 1;
548 case CONST_INT :
549 return (LARGE_INT (INTVAL (op)));
550 case CONST_DOUBLE :
551 /* We can handle DImode integer constants in SImode if the value
552 (signed or unsigned) will fit in 32 bits. This is needed because
553 large unsigned 32 bit constants are represented as CONST_DOUBLEs. */
554 if (mode == SImode)
555 return arc_double_limm_p (op);
556 /* We can handle 32 bit floating point constants. */
557 if (mode == SFmode)
558 return GET_MODE (op) == SFmode;
559 return 0;
560 case REG :
561 return register_operand (op, mode);
562 case SUBREG :
563 /* (subreg (mem ...) ...) can occur here if the inner part was once a
564 pseudo-reg and is now a stack slot. */
565 if (GET_CODE (SUBREG_REG (op)) == MEM)
566 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
567 else
568 return register_operand (op, mode);
569 case MEM :
570 return address_operand (XEXP (op, 0), mode);
571 default :
572 return 0;
576 /* Return true if OP is an acceptable argument for a double word
577 move source. */
580 move_double_src_operand (rtx op, enum machine_mode mode)
582 switch (GET_CODE (op))
584 case REG :
585 return register_operand (op, mode);
586 case SUBREG :
587 /* (subreg (mem ...) ...) can occur here if the inner part was once a
588 pseudo-reg and is now a stack slot. */
589 if (GET_CODE (SUBREG_REG (op)) == MEM)
590 return move_double_src_operand (SUBREG_REG (op), mode);
591 else
592 return register_operand (op, mode);
593 case MEM :
594 /* Disallow auto inc/dec for now. */
595 if (GET_CODE (XEXP (op, 0)) == PRE_DEC
596 || GET_CODE (XEXP (op, 0)) == PRE_INC)
597 return 0;
598 return address_operand (XEXP (op, 0), mode);
599 case CONST_INT :
600 case CONST_DOUBLE :
601 return 1;
602 default :
603 return 0;
607 /* Return true if OP is an acceptable argument for a move destination. */
610 move_dest_operand (rtx op, enum machine_mode mode)
612 switch (GET_CODE (op))
614 case REG :
615 return register_operand (op, mode);
616 case SUBREG :
617 /* (subreg (mem ...) ...) can occur here if the inner part was once a
618 pseudo-reg and is now a stack slot. */
619 if (GET_CODE (SUBREG_REG (op)) == MEM)
620 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
621 else
622 return register_operand (op, mode);
623 case MEM :
624 return address_operand (XEXP (op, 0), mode);
625 default :
626 return 0;
 630 /* Return true if OP is a valid load-with-update operand.  */
633 load_update_operand (rtx op, enum machine_mode mode)
635 if (GET_CODE (op) != MEM
636 || GET_MODE (op) != mode)
637 return 0;
638 op = XEXP (op, 0);
639 if (GET_CODE (op) != PLUS
640 || GET_MODE (op) != Pmode
641 || !register_operand (XEXP (op, 0), Pmode)
642 || !nonmemory_operand (XEXP (op, 1), Pmode))
643 return 0;
644 return 1;
 647 /* Return true if OP is a valid store-with-update operand.  */
650 store_update_operand (rtx op, enum machine_mode mode)
652 if (GET_CODE (op) != MEM
653 || GET_MODE (op) != mode)
654 return 0;
655 op = XEXP (op, 0);
656 if (GET_CODE (op) != PLUS
657 || GET_MODE (op) != Pmode
658 || !register_operand (XEXP (op, 0), Pmode)
659 || !(GET_CODE (XEXP (op, 1)) == CONST_INT
660 && SMALL_INT (INTVAL (XEXP (op, 1)))))
661 return 0;
662 return 1;
665 /* Return true if OP is a non-volatile non-immediate operand.
666 Volatile memory refs require a special "cache-bypass" instruction
667 and only the standard movXX patterns are set up to handle them. */
670 nonvol_nonimm_operand (rtx op, enum machine_mode mode)
672 if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
673 return 0;
674 return nonimmediate_operand (op, mode);
677 /* Accept integer operands in the range -0x80000000..0x7fffffff. We have
678 to check the range carefully since this predicate is used in DImode
679 contexts. */
682 const_sint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
684 /* All allowed constants will fit a CONST_INT. */
685 return (GET_CODE (op) == CONST_INT
686 && (INTVAL (op) >= (-0x7fffffff - 1) && INTVAL (op) <= 0x7fffffff));
689 /* Accept integer operands in the range 0..0xffffffff. We have to check the
690 range carefully since this predicate is used in DImode contexts. Also, we
691 need some extra crud to make it work when hosted on 64-bit machines. */
694 const_uint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
696 #if HOST_BITS_PER_WIDE_INT > 32
697 /* All allowed constants will fit a CONST_INT. */
698 return (GET_CODE (op) == CONST_INT
699 && (INTVAL (op) >= 0 && INTVAL (op) <= 0xffffffffL));
700 #else
701 return ((GET_CODE (op) == CONST_INT && INTVAL (op) >= 0)
702 || (GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_HIGH (op) == 0));
703 #endif
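/* Illustration of the 32 bit host case above: an unsigned value such as
   0xfffffffc cannot be held as a non-negative CONST_INT when
   HOST_WIDE_INT is only 32 bits wide, so it shows up as a CONST_DOUBLE
   with CONST_DOUBLE_HIGH == 0, which the second arm of the return
   statement accepts.  */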
706 /* Return 1 if OP is a comparison operator valid for the mode of CC.
707 This allows the use of MATCH_OPERATOR to recognize all the branch insns.
709 Some insns only set a few bits in the condition code. So only allow those
710 comparisons that use the bits that are valid. */
713 proper_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
715 enum rtx_code code;
716 if (!COMPARISON_P (op))
717 return 0;
719 code = GET_CODE (op);
720 if (GET_MODE (XEXP (op, 0)) == CCZNmode)
721 return (code == EQ || code == NE);
722 if (GET_MODE (XEXP (op, 0)) == CCZNCmode)
723 return (code == EQ || code == NE
724 || code == LTU || code == GEU || code == GTU || code == LEU);
725 return 1;
728 /* Misc. utilities. */
730 /* X and Y are two things to compare using CODE. Emit the compare insn and
731 return the rtx for the cc reg in the proper mode. */
734 gen_compare_reg (enum rtx_code code, rtx x, rtx y)
736 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
737 rtx cc_reg;
739 cc_reg = gen_rtx_REG (mode, 61);
741 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
742 gen_rtx_COMPARE (mode, x, y)));
744 return cc_reg;
747 /* Return 1 if VALUE, a const_double, will fit in a limm (4 byte number).
748 We assume the value can be either signed or unsigned. */
751 arc_double_limm_p (rtx value)
753 HOST_WIDE_INT low, high;
755 gcc_assert (GET_CODE (value) == CONST_DOUBLE);
757 low = CONST_DOUBLE_LOW (value);
758 high = CONST_DOUBLE_HIGH (value);
760 if (low & 0x80000000)
762 return (((unsigned HOST_WIDE_INT) low <= 0xffffffff && high == 0)
763 || (((low & - (unsigned HOST_WIDE_INT) 0x80000000)
764 == - (unsigned HOST_WIDE_INT) 0x80000000)
765 && high == -1));
767 else
769 return (unsigned HOST_WIDE_INT) low <= 0x7fffffff && high == 0;
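/* Illustration: DImode -1 (high == -1 and the sign bit of the low word
   set) and the unsigned value 0xffffffff (high == 0) both fit in a limm,
   while any constant needing more than 32 significant bits is
   rejected.  */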
773 /* Do any needed setup for a variadic function. For the ARC, we must
774 create a register parameter block, and then copy any anonymous arguments
775 in registers to memory.
777 CUM has not been updated for the last named argument which has type TYPE
778 and mode MODE, and we rely on this fact.
 780    We do things a little oddly here.  We're supposed to allocate space only
 781    for the anonymous arguments.  However, we need to keep the stack eight byte
 782    aligned, so we round the space up if necessary and leave it to va_start
 783    to compensate.  */
785 static void
786 arc_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
787 enum machine_mode mode,
788 tree type ATTRIBUTE_UNUSED,
789 int *pretend_size,
790 int no_rtl)
792 int first_anon_arg;
794 /* All BLKmode values are passed by reference. */
795 gcc_assert (mode != BLKmode);
797 first_anon_arg = *cum + ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1)
798 / UNITS_PER_WORD);
800 if (first_anon_arg < MAX_ARC_PARM_REGS && !no_rtl)
802 /* Note that first_reg_offset < MAX_ARC_PARM_REGS. */
803 int first_reg_offset = first_anon_arg;
804 /* Size in words to "pretend" allocate. */
805 int size = MAX_ARC_PARM_REGS - first_reg_offset;
806 /* Extra slop to keep stack eight byte aligned. */
807 int align_slop = size & 1;
808 rtx regblock;
810 regblock = gen_rtx_MEM (BLKmode,
811 plus_constant (arg_pointer_rtx,
812 FIRST_PARM_OFFSET (0)
813 + align_slop * UNITS_PER_WORD));
814 set_mem_alias_set (regblock, get_varargs_alias_set ());
815 set_mem_align (regblock, BITS_PER_WORD);
816 move_block_from_reg (first_reg_offset, regblock,
817 MAX_ARC_PARM_REGS - first_reg_offset);
819 *pretend_size = ((MAX_ARC_PARM_REGS - first_reg_offset + align_slop)
820 * UNITS_PER_WORD);
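/* Worked example, assuming MAX_ARC_PARM_REGS is 8 as in arc.h: for
   int f (int a, ...)  the named argument consumes one register, so
   first_anon_arg == 1, size == 7 words and align_slop == 1.  The seven
   anonymous-register words are stored starting one word above
   FIRST_PARM_OFFSET (0) to leave room for the alignment slop, and
   *pretend_size becomes 8 * UNITS_PER_WORD, keeping the stack eight byte
   aligned as described above.  */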
824 /* Cost functions. */
826 /* Compute a (partial) cost for rtx X. Return true if the complete
827 cost has been computed, and false if subexpressions should be
828 scanned. In either case, *TOTAL contains the cost result. */
830 static bool
831 arc_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
833 switch (code)
835 /* Small integers are as cheap as registers. 4 byte values can
836 be fetched as immediate constants - let's give that the cost
837 of an extra insn. */
838 case CONST_INT:
839 if (SMALL_INT (INTVAL (x)))
841 *total = 0;
842 return true;
844 /* FALLTHRU */
846 case CONST:
847 case LABEL_REF:
848 case SYMBOL_REF:
849 *total = COSTS_N_INSNS (1);
850 return true;
852 case CONST_DOUBLE:
854 rtx high, low;
855 split_double (x, &high, &low);
856 *total = COSTS_N_INSNS (!SMALL_INT (INTVAL (high))
857 + !SMALL_INT (INTVAL (low)));
858 return true;
861 /* Encourage synth_mult to find a synthetic multiply when reasonable.
862 If we need more than 12 insns to do a multiply, then go out-of-line,
863 since the call overhead will be < 10% of the cost of the multiply. */
864 case ASHIFT:
865 case ASHIFTRT:
866 case LSHIFTRT:
867 if (TARGET_SHIFTER)
868 *total = COSTS_N_INSNS (1);
869 else if (GET_CODE (XEXP (x, 1)) != CONST_INT)
870 *total = COSTS_N_INSNS (16);
871 else
872 *total = COSTS_N_INSNS (INTVAL (XEXP ((x), 1)));
873 return false;
875 default:
876 return false;
881 /* Provide the costs of an addressing mode that contains ADDR.
882 If ADDR is not a valid address, its cost is irrelevant. */
884 static int
885 arc_address_cost (rtx addr)
887 switch (GET_CODE (addr))
889 case REG :
890 return 1;
892 case LABEL_REF :
893 case SYMBOL_REF :
894 case CONST :
895 return 2;
897 case PLUS :
899 register rtx plus0 = XEXP (addr, 0);
900 register rtx plus1 = XEXP (addr, 1);
902 if (GET_CODE (plus0) != REG)
903 break;
905 switch (GET_CODE (plus1))
907 case CONST_INT :
 908           return SMALL_INT (INTVAL (plus1)) ? 1 : 2;
909 case CONST :
910 case SYMBOL_REF :
911 case LABEL_REF :
912 return 2;
913 default:
914 break;
916 break;
918 default:
919 break;
922 return 4;
925 /* Function prologue/epilogue handlers. */
927 /* ARC stack frames look like:
929 Before call After call
930 +-----------------------+ +-----------------------+
931 | | | |
932 high | local variables, | | local variables, |
933 mem | reg save area, etc. | | reg save area, etc. |
934 | | | |
935 +-----------------------+ +-----------------------+
936 | | | |
937 | arguments on stack. | | arguments on stack. |
938 | | | |
939 SP+16->+-----------------------+FP+48->+-----------------------+
940 | 4 word save area for | | reg parm save area, |
941 | return addr, prev %fp | | only created for |
942 SP+0->+-----------------------+ | variable argument |
943 | functions |
944 FP+16->+-----------------------+
945 | 4 word save area for |
946 | return addr, prev %fp |
947 FP+0->+-----------------------+
948 | |
949 | local variables |
950 | |
951 +-----------------------+
952 | |
953 | register save area |
954 | |
955 +-----------------------+
956 | |
957 | alloca allocations |
958 | |
959 +-----------------------+
960 | |
961 | arguments on stack |
962 | |
963 SP+16->+-----------------------+
964 low | 4 word save area for |
965 memory | return addr, prev %fp |
966 SP+0->+-----------------------+
968 Notes:
 969    1) The "reg parm save area" does not exist for non-variable-argument fns.
970 The "reg parm save area" can be eliminated completely if we created our
971 own va-arc.h, but that has tradeoffs as well (so it's not done). */
973 /* Structure to be filled in by arc_compute_frame_size with register
974 save masks, and offsets for the current function. */
975 struct arc_frame_info
977 unsigned int total_size; /* # bytes that the entire frame takes up. */
978 unsigned int extra_size; /* # bytes of extra stuff. */
979 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
980 unsigned int args_size; /* # bytes that outgoing arguments take up. */
981 unsigned int reg_size; /* # bytes needed to store regs. */
982 unsigned int var_size; /* # bytes that variables take up. */
983 unsigned int reg_offset; /* Offset from new sp to store regs. */
984 unsigned int gmask; /* Mask of saved gp registers. */
985 int initialized; /* Nonzero if frame size already calculated. */
988 /* Current frame information calculated by arc_compute_frame_size. */
989 static struct arc_frame_info current_frame_info;
991 /* Zero structure to initialize current_frame_info. */
992 static struct arc_frame_info zero_frame_info;
994 /* Type of function DECL.
996 The result is cached. To reset the cache at the end of a function,
997 call with DECL = NULL_TREE. */
999 enum arc_function_type
1000 arc_compute_function_type (tree decl)
1002 tree a;
1003 /* Cached value. */
1004 static enum arc_function_type fn_type = ARC_FUNCTION_UNKNOWN;
1005 /* Last function we were called for. */
1006 static tree last_fn = NULL_TREE;
1008 /* Resetting the cached value? */
1009 if (decl == NULL_TREE)
1011 fn_type = ARC_FUNCTION_UNKNOWN;
1012 last_fn = NULL_TREE;
1013 return fn_type;
1016 if (decl == last_fn && fn_type != ARC_FUNCTION_UNKNOWN)
1017 return fn_type;
1019 /* Assume we have a normal function (not an interrupt handler). */
1020 fn_type = ARC_FUNCTION_NORMAL;
1022 /* Now see if this is an interrupt handler. */
1023 for (a = DECL_ATTRIBUTES (current_function_decl);
1025 a = TREE_CHAIN (a))
1027 tree name = TREE_PURPOSE (a), args = TREE_VALUE (a);
1029 if (name == get_identifier ("__interrupt__")
1030 && list_length (args) == 1
1031 && TREE_CODE (TREE_VALUE (args)) == STRING_CST)
1033 tree value = TREE_VALUE (args);
1035 if (!strcmp (TREE_STRING_POINTER (value), "ilink1"))
1036 fn_type = ARC_FUNCTION_ILINK1;
1037 else if (!strcmp (TREE_STRING_POINTER (value), "ilink2"))
1038 fn_type = ARC_FUNCTION_ILINK2;
1039 else
1040 gcc_unreachable ();
1041 break;
1045 last_fn = decl;
1046 return fn_type;
1049 #define ILINK1_REGNUM 29
1050 #define ILINK2_REGNUM 30
1051 #define RETURN_ADDR_REGNUM 31
1052 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1053 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1055 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1056 The return address and frame pointer are treated separately.
1057 Don't consider them here. */
1058 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1059 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1060 && (regs_ever_live[regno] && (!call_used_regs[regno] || interrupt_p)))
1062 #define MUST_SAVE_RETURN_ADDR (regs_ever_live[RETURN_ADDR_REGNUM])
1064 /* Return the bytes needed to compute the frame pointer from the current
1065 stack pointer.
1067 SIZE is the size needed for local variables. */
1069 unsigned int
1070 arc_compute_frame_size (int size /* # of var. bytes allocated. */)
1072 int regno;
1073 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1074 unsigned int reg_size, reg_offset;
1075 unsigned int gmask;
1076 enum arc_function_type fn_type;
1077 int interrupt_p;
1079 var_size = size;
1080 args_size = current_function_outgoing_args_size;
1081 pretend_size = current_function_pretend_args_size;
1082 extra_size = FIRST_PARM_OFFSET (0);
1083 total_size = extra_size + pretend_size + args_size + var_size;
 1084   reg_offset = FIRST_PARM_OFFSET (0) + current_function_outgoing_args_size;
1085 reg_size = 0;
1086 gmask = 0;
1088 /* See if this is an interrupt handler. Call used registers must be saved
1089 for them too. */
1090 fn_type = arc_compute_function_type (current_function_decl);
1091 interrupt_p = ARC_INTERRUPT_P (fn_type);
1093 /* Calculate space needed for registers.
1094 ??? We ignore the extension registers for now. */
1096 for (regno = 0; regno <= 31; regno++)
1098 if (MUST_SAVE_REGISTER (regno, interrupt_p))
1100 reg_size += UNITS_PER_WORD;
1101 gmask |= 1 << regno;
1105 total_size += reg_size;
1107 /* If the only space to allocate is the fp/blink save area this is an
1108 empty frame. However, if we'll be making a function call we need to
1109 allocate a stack frame for our callee's fp/blink save area. */
1110 if (total_size == extra_size
1111 && !MUST_SAVE_RETURN_ADDR)
1112 total_size = extra_size = 0;
1114 total_size = ARC_STACK_ALIGN (total_size);
1116 /* Save computed information. */
1117 current_frame_info.total_size = total_size;
1118 current_frame_info.extra_size = extra_size;
1119 current_frame_info.pretend_size = pretend_size;
1120 current_frame_info.var_size = var_size;
1121 current_frame_info.args_size = args_size;
1122 current_frame_info.reg_size = reg_size;
1123 current_frame_info.reg_offset = reg_offset;
1124 current_frame_info.gmask = gmask;
1125 current_frame_info.initialized = reload_completed;
1127 /* Ok, we're done. */
1128 return total_size;
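/* Worked example: 20 bytes of locals, no outgoing arguments, no pretend
   args and two call-saved registers to store give var_size == 20,
   reg_size == 8, extra_size == FIRST_PARM_OFFSET (0) and a gmask with two
   bits set; the sum is then rounded up by ARC_STACK_ALIGN so total_size
   stays a multiple of the stack alignment this port keeps (eight bytes,
   per the varargs comments above).  */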
1131 /* Common code to save/restore registers. */
1133 void
1134 arc_save_restore (FILE *file,
1135 const char *base_reg,
1136 unsigned int offset,
1137 unsigned int gmask,
1138 const char *op)
1140 int regno;
1142 if (gmask == 0)
1143 return;
1145 for (regno = 0; regno <= 31; regno++)
1147 if ((gmask & (1L << regno)) != 0)
1149 fprintf (file, "\t%s %s,[%s,%d]\n",
1150 op, reg_names[regno], base_reg, offset);
1151 offset += UNITS_PER_WORD;
1156 /* Target hook to assemble an integer object. The ARC version needs to
1157 emit a special directive for references to labels and function
1158 symbols. */
1160 static bool
1161 arc_assemble_integer (rtx x, unsigned int size, int aligned_p)
1163 if (size == UNITS_PER_WORD && aligned_p
1164 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
1165 || GET_CODE (x) == LABEL_REF))
1167 fputs ("\t.word\t%st(", asm_out_file);
1168 output_addr_const (asm_out_file, x);
1169 fputs (")\n", asm_out_file);
1170 return true;
1172 return default_assemble_integer (x, size, aligned_p);
1175 /* Set up the stack and frame pointer (if desired) for the function. */
1177 static void
1178 arc_output_function_prologue (FILE *file, HOST_WIDE_INT size)
1180 const char *sp_str = reg_names[STACK_POINTER_REGNUM];
1181 const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1182 unsigned int gmask = current_frame_info.gmask;
1183 enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1185 /* If this is an interrupt handler, set up our stack frame.
1186 ??? Optimize later. */
1187 if (ARC_INTERRUPT_P (fn_type))
1189 fprintf (file, "\t%s interrupt handler\n",
1190 ASM_COMMENT_START);
1191 fprintf (file, "\tsub %s,%s,16\n", sp_str, sp_str);
1194 /* This is only for the human reader. */
1195 fprintf (file, "\t%s BEGIN PROLOGUE %s vars= %d, regs= %d, args= %d, extra= %d\n",
1196 ASM_COMMENT_START, ASM_COMMENT_START,
1197 current_frame_info.var_size,
1198 current_frame_info.reg_size / 4,
1199 current_frame_info.args_size,
1200 current_frame_info.extra_size);
1202 size = ARC_STACK_ALIGN (size);
1203 size = (! current_frame_info.initialized
1204 ? arc_compute_frame_size (size)
1205 : current_frame_info.total_size);
1207 /* These cases shouldn't happen. Catch them now. */
1208 gcc_assert (size || !gmask);
1210 /* Allocate space for register arguments if this is a variadic function. */
1211 if (current_frame_info.pretend_size != 0)
1212 fprintf (file, "\tsub %s,%s,%d\n",
1213 sp_str, sp_str, current_frame_info.pretend_size);
 1215   /* The home-grown ABI says the link register is saved first.  */
1216 if (MUST_SAVE_RETURN_ADDR)
1217 fprintf (file, "\tst %s,[%s,%d]\n",
1218 reg_names[RETURN_ADDR_REGNUM], sp_str, UNITS_PER_WORD);
1220 /* Set up the previous frame pointer next (if we need to). */
1221 if (frame_pointer_needed)
1223 fprintf (file, "\tst %s,[%s]\n", fp_str, sp_str);
1224 fprintf (file, "\tmov %s,%s\n", fp_str, sp_str);
1227 /* ??? We don't handle the case where the saved regs are more than 252
1228 bytes away from sp. This can be handled by decrementing sp once, saving
1229 the regs, and then decrementing it again. The epilogue doesn't have this
1230 problem as the `ld' insn takes reg+limm values (though it would be more
1231 efficient to avoid reg+limm). */
1233 /* Allocate the stack frame. */
1234 if (size - current_frame_info.pretend_size > 0)
1235 fprintf (file, "\tsub %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
1236 sp_str, sp_str, size - current_frame_info.pretend_size);
1238 /* Save any needed call-saved regs (and call-used if this is an
1239 interrupt handler). */
1240 arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1241 /* The zeroing of these two bits is unnecessary,
1242 but leave this in for clarity. */
1243 gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1244 "st");
1246 fprintf (file, "\t%s END PROLOGUE\n", ASM_COMMENT_START);
1249 /* Do any necessary cleanup after a function to restore stack, frame,
1250 and regs. */
1252 static void
1253 arc_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
1255 rtx epilogue_delay = current_function_epilogue_delay_list;
1256 int noepilogue = FALSE;
1257 enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1259 /* This is only for the human reader. */
1260 fprintf (file, "\t%s EPILOGUE\n", ASM_COMMENT_START);
1262 size = ARC_STACK_ALIGN (size);
1263 size = (!current_frame_info.initialized
1264 ? arc_compute_frame_size (size)
1265 : current_frame_info.total_size);
1267 if (size == 0 && epilogue_delay == 0)
1269 rtx insn = get_last_insn ();
1271 /* If the last insn was a BARRIER, we don't have to write any code
1272 because a jump (aka return) was put there. */
1273 if (GET_CODE (insn) == NOTE)
1274 insn = prev_nonnote_insn (insn);
1275 if (insn && GET_CODE (insn) == BARRIER)
1276 noepilogue = TRUE;
1279 if (!noepilogue)
1281 unsigned int pretend_size = current_frame_info.pretend_size;
1282 unsigned int frame_size = size - pretend_size;
1283 int restored, fp_restored_p;
1284 int can_trust_sp_p = !current_function_calls_alloca;
1285 const char *sp_str = reg_names[STACK_POINTER_REGNUM];
1286 const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1288 /* ??? There are lots of optimizations that can be done here.
1289 EG: Use fp to restore regs if it's closer.
1290 Maybe in time we'll do them all. For now, always restore regs from
1291 sp, but don't restore sp if we don't have to. */
1293 if (!can_trust_sp_p)
1295 gcc_assert (frame_pointer_needed);
1296 fprintf (file,"\tsub %s,%s,%d\t\t%s sp not trusted here\n",
1297 sp_str, fp_str, frame_size, ASM_COMMENT_START);
1300 /* Restore any saved registers. */
1301 arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1302 /* The zeroing of these two bits is unnecessary,
1303 but leave this in for clarity. */
1304 current_frame_info.gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1305 "ld");
1307 if (MUST_SAVE_RETURN_ADDR)
1308 fprintf (file, "\tld %s,[%s,%d]\n",
1309 reg_names[RETURN_ADDR_REGNUM],
1310 frame_pointer_needed ? fp_str : sp_str,
1311 UNITS_PER_WORD + (frame_pointer_needed ? 0 : frame_size));
1313 /* Keep track of how much of the stack pointer we've restored.
1314 It makes the following a lot more readable. */
1315 restored = 0;
1316 fp_restored_p = 0;
1318 /* We try to emit the epilogue delay slot insn right after the load
1319 of the return address register so that it can execute with the
1320 stack intact. Secondly, loads are delayed. */
1321 /* ??? If stack intactness is important, always emit now. */
1322 if (MUST_SAVE_RETURN_ADDR && epilogue_delay != NULL_RTX)
1324 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
1325 epilogue_delay = NULL_RTX;
1328 if (frame_pointer_needed)
1330 /* Try to restore the frame pointer in the delay slot. We can't,
1331 however, if any of these is true. */
1332 if (epilogue_delay != NULL_RTX
1333 || !SMALL_INT (frame_size)
1334 || pretend_size
1335 || ARC_INTERRUPT_P (fn_type))
1337 /* Note that we restore fp and sp here! */
1338 fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1339 restored += frame_size;
1340 fp_restored_p = 1;
1343 else if (!SMALL_INT (size /* frame_size + pretend_size */)
1344 || ARC_INTERRUPT_P (fn_type))
1346 fprintf (file, "\tadd %s,%s,%d\n", sp_str, sp_str, frame_size);
1347 restored += frame_size;
1350 /* These must be done before the return insn because the delay slot
1351 does the final stack restore. */
1352 if (ARC_INTERRUPT_P (fn_type))
1354 if (epilogue_delay)
1356 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
1360 /* Emit the return instruction. */
1362 static const int regs[4] = {
1363 0, RETURN_ADDR_REGNUM, ILINK1_REGNUM, ILINK2_REGNUM
1365 fprintf (file, "\tj.d %s\n", reg_names[regs[fn_type]]);
1368 /* If the only register saved is the return address, we need a
1369 nop, unless we have an instruction to put into it. Otherwise
1370 we don't since reloading multiple registers doesn't reference
1371 the register being loaded. */
1373 if (ARC_INTERRUPT_P (fn_type))
1374 fprintf (file, "\tadd %s,%s,16\n", sp_str, sp_str);
1375 else if (epilogue_delay != NULL_RTX)
1377 gcc_assert (!frame_pointer_needed || fp_restored_p);
1378 gcc_assert (restored >= size);
1379 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
1381 else if (frame_pointer_needed && !fp_restored_p)
1383 gcc_assert (SMALL_INT (frame_size));
1384 /* Note that we restore fp and sp here! */
1385 fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1387 else if (restored < size)
1389 gcc_assert (SMALL_INT (size - restored));
1390 fprintf (file, "\tadd %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
1391 sp_str, sp_str, size - restored);
1393 else
1394 fprintf (file, "\tnop\n");
1397 /* Reset state info for each function. */
1398 current_frame_info = zero_frame_info;
1399 arc_compute_function_type (NULL_TREE);
1402 /* Define the number of delay slots needed for the function epilogue.
1404 Interrupt handlers can't have any epilogue delay slots (it's always needed
1405 for something else, I think). For normal functions, we have to worry about
1406 using call-saved regs as they'll be restored before the delay slot insn.
1407 Functions with non-empty frames already have enough choices for the epilogue
1408 delay slot so for now we only consider functions with empty frames. */
1411 arc_delay_slots_for_epilogue (void)
1413 if (arc_compute_function_type (current_function_decl) != ARC_FUNCTION_NORMAL)
1414 return 0;
1415 if (!current_frame_info.initialized)
1416 (void) arc_compute_frame_size (get_frame_size ());
1417 if (current_frame_info.total_size == 0)
1418 return 1;
1419 return 0;
1422 /* Return true if TRIAL is a valid insn for the epilogue delay slot.
1423 Any single length instruction which doesn't reference the stack or frame
1424 pointer or any call-saved register is OK. SLOT will always be 0. */
1427 arc_eligible_for_epilogue_delay (rtx trial, int slot)
1429 gcc_assert (!slot);
1431 if (get_attr_length (trial) == 1
 1432       /* If registers were saved, presumably there's more than enough
1433 possibilities for the delay slot. The alternative is something
1434 more complicated (of course, if we expanded the epilogue as rtl
1435 this problem would go away). */
1436 /* ??? Note that this will always be true since only functions with
1437 empty frames have epilogue delay slots. See
1438 arc_delay_slots_for_epilogue. */
1439 && current_frame_info.gmask == 0
1440 && ! reg_mentioned_p (stack_pointer_rtx, PATTERN (trial))
1441 && ! reg_mentioned_p (frame_pointer_rtx, PATTERN (trial)))
1442 return 1;
1443 return 0;
1446 /* PIC */
1448 /* Emit special PIC prologues and epilogues. */
1450 void
1451 arc_finalize_pic (void)
1453 /* nothing to do */
1456 /* Return true if OP is a shift operator. */
1459 shift_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1461 switch (GET_CODE (op))
1463 case ASHIFTRT:
1464 case LSHIFTRT:
1465 case ASHIFT:
1466 return 1;
1467 default:
1468 return 0;
1472 /* Output the assembler code for doing a shift.
1473 We go to a bit of trouble to generate efficient code as the ARC only has
1474 single bit shifts. This is taken from the h8300 port. We only have one
1475 mode of shifting and can't access individual bytes like the h8300 can, so
1476 this is greatly simplified (at the expense of not generating hyper-
1477 efficient code).
1479 This function is not used if the variable shift insns are present. */
1481 /* ??? We assume the output operand is the same as operand 1.
1482 This can be optimized (deleted) in the case of 1 bit shifts. */
1483 /* ??? We use the loop register here. We don't use it elsewhere (yet) and
1484 using it here will give us a chance to play with it. */
1486 const char *
1487 output_shift (rtx *operands)
1489 rtx shift = operands[3];
1490 enum machine_mode mode = GET_MODE (shift);
1491 enum rtx_code code = GET_CODE (shift);
1492 const char *shift_one;
1494 gcc_assert (mode == SImode);
1496 switch (code)
1498 case ASHIFT: shift_one = "asl %0,%0"; break;
1499 case ASHIFTRT: shift_one = "asr %0,%0"; break;
1500 case LSHIFTRT: shift_one = "lsr %0,%0"; break;
1501 default: gcc_unreachable ();
1504 if (GET_CODE (operands[2]) != CONST_INT)
1506 if (optimize)
1507 output_asm_insn ("mov lp_count,%2", operands);
1508 else
1509 output_asm_insn ("mov %4,%2", operands);
1510 goto shiftloop;
1512 else
1514 int n = INTVAL (operands[2]);
1516 /* If the count is negative, make it 0. */
1517 if (n < 0)
1518 n = 0;
1519 /* If the count is too big, truncate it.
1520 ANSI says shifts of GET_MODE_BITSIZE are undefined - we choose to
1521 do the intuitive thing. */
1522 else if (n > GET_MODE_BITSIZE (mode))
1523 n = GET_MODE_BITSIZE (mode);
1525 /* First see if we can do them inline. */
1526 if (n <= 8)
1528 while (--n >= 0)
1529 output_asm_insn (shift_one, operands);
1531 /* See if we can use a rotate/and. */
1532 else if (n == BITS_PER_WORD - 1)
1534 switch (code)
1536 case ASHIFT :
1537 output_asm_insn ("and %0,%0,1\n\tror %0,%0", operands);
1538 break;
1539 case ASHIFTRT :
1540 /* The ARC doesn't have a rol insn. Use something else. */
1541 output_asm_insn ("asl.f 0,%0\n\tsbc %0,0,0", operands);
1542 break;
1543 case LSHIFTRT :
1544 /* The ARC doesn't have a rol insn. Use something else. */
1545 output_asm_insn ("asl.f 0,%0\n\tadc %0,0,0", operands);
1546 break;
1547 default:
1548 break;
1551 /* Must loop. */
1552 else
1554 char buf[100];
1556 if (optimize)
1557 output_asm_insn ("mov lp_count,%c2", operands);
1558 else
1559 output_asm_insn ("mov %4,%c2", operands);
1560 shiftloop:
1561 if (optimize)
1563 if (flag_pic)
1564 sprintf (buf, "lr %%4,[status]\n\tadd %%4,%%4,6\t%s single insn loop start",
1565 ASM_COMMENT_START);
1566 else
1567 sprintf (buf, "mov %%4,%%%%st(1f)\t%s (single insn loop start) >> 2",
1568 ASM_COMMENT_START);
1569 output_asm_insn (buf, operands);
1570 output_asm_insn ("sr %4,[lp_start]", operands);
1571 output_asm_insn ("add %4,%4,1", operands);
1572 output_asm_insn ("sr %4,[lp_end]", operands);
1573 output_asm_insn ("nop\n\tnop", operands);
1574 if (flag_pic)
1575 fprintf (asm_out_file, "\t%s single insn loop\n",
1576 ASM_COMMENT_START);
1577 else
1578 fprintf (asm_out_file, "1:\t%s single insn loop\n",
1579 ASM_COMMENT_START);
1580 output_asm_insn (shift_one, operands);
1582 else
1584 fprintf (asm_out_file, "1:\t%s begin shift loop\n",
1585 ASM_COMMENT_START);
1586 output_asm_insn ("sub.f %4,%4,1", operands);
1587 output_asm_insn ("nop", operands);
1588 output_asm_insn ("bn.nd 2f", operands);
1589 output_asm_insn (shift_one, operands);
1590 output_asm_insn ("b.nd 1b", operands);
1591 fprintf (asm_out_file, "2:\t%s end shift loop\n",
1592 ASM_COMMENT_START);
1597 return "";
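/* For reference, the sequences emitted above look like this:
     shift left by 3            ->  asl %0,%0  repeated three times
     logical right by 31        ->  asl.f 0,%0
                                    adc %0,0,0
     variable count, optimized  ->  mov lp_count,%2 followed by the
                                    zero-overhead loop wrapped around a
                                    single asl/asr/lsr instruction.  */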
1600 /* Nested function support. */
1602 /* Emit RTL insns to initialize the variable parts of a trampoline.
1603 FNADDR is an RTX for the address of the function's pure code.
1604 CXT is an RTX for the static chain value for the function. */
1606 void
1607 arc_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
1608 rtx fnaddr ATTRIBUTE_UNUSED,
1609 rtx cxt ATTRIBUTE_UNUSED)
1613 /* Set the cpu type and print out other fancy things,
1614 at the top of the file. */
1616 static void
1617 arc_file_start (void)
1619 default_file_start ();
1620 fprintf (asm_out_file, "\t.cpu %s\n", arc_cpu_string);
1623 /* Print operand X (an rtx) in assembler syntax to file FILE.
1624 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
1625 For `%' followed by punctuation, CODE is the punctuation and X is null. */
1627 void
1628 arc_print_operand (FILE *file, rtx x, int code)
1630 switch (code)
1632 case '#' :
1633 /* Conditional branches. For now these are equivalent. */
1634 case '*' :
1635 /* Unconditional branches. Output the appropriate delay slot suffix. */
1636 if (!final_sequence || XVECLEN (final_sequence, 0) == 1)
1638 /* There's nothing in the delay slot. */
1639 fputs (".nd", file);
1641 else
1643 rtx jump = XVECEXP (final_sequence, 0, 0);
1644 rtx delay = XVECEXP (final_sequence, 0, 1);
1645 if (INSN_ANNULLED_BRANCH_P (jump))
1646 fputs (INSN_FROM_TARGET_P (delay) ? ".jd" : ".nd", file);
1647 else
1648 fputs (".d", file);
1650 return;
1651 case '?' : /* with leading "." */
1652 case '!' : /* without leading "." */
1653 /* This insn can be conditionally executed. See if the ccfsm machinery
1654 says it should be conditionalized. */
1655 if (arc_ccfsm_state == 3 || arc_ccfsm_state == 4)
1657 /* Is this insn in a delay slot? */
1658 if (final_sequence && XVECLEN (final_sequence, 0) == 2)
1660 rtx insn = XVECEXP (final_sequence, 0, 1);
1662 /* If the insn is annulled and is from the target path, we need
1663 to inverse the condition test. */
1664 if (INSN_ANNULLED_BRANCH_P (insn))
1666 if (INSN_FROM_TARGET_P (insn))
1667 fprintf (file, "%s%s",
1668 code == '?' ? "." : "",
1669 arc_condition_codes[ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc)]);
1670 else
1671 fprintf (file, "%s%s",
1672 code == '?' ? "." : "",
1673 arc_condition_codes[arc_ccfsm_current_cc]);
1675 else
1677 /* This insn is executed for either path, so don't
1678 conditionalize it at all. */
1679 ; /* nothing to do */
1682 else
1684 /* This insn isn't in a delay slot. */
1685 fprintf (file, "%s%s",
1686 code == '?' ? "." : "",
1687 arc_condition_codes[arc_ccfsm_current_cc]);
1690 return;
1691 case '~' :
1692 /* Output a nop if we're between a set of the condition codes,
1693 and a conditional branch. */
1694 if (last_insn_set_cc_p)
1695 fputs ("nop\n\t", file);
1696 return;
1697 case 'd' :
1698 fputs (arc_condition_codes[get_arc_condition_code (x)], file);
1699 return;
1700 case 'D' :
1701 fputs (arc_condition_codes[ARC_INVERSE_CONDITION_CODE
1702 (get_arc_condition_code (x))],
1703 file);
1704 return;
1705 case 'R' :
1706 /* Write second word of DImode or DFmode reference,
1707 register or memory. */
1708 if (GET_CODE (x) == REG)
1709 fputs (reg_names[REGNO (x)+1], file);
1710 else if (GET_CODE (x) == MEM)
1712 fputc ('[', file);
1713 /* Handle possible auto-increment. Since it is pre-increment and
1714 we have already done it, we can just use an offset of four. */
1715 /* ??? This is taken from rs6000.c I think. I don't think it is
1716 currently necessary, but keep it around. */
1717 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1718 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1719 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
1720 else
1721 output_address (plus_constant (XEXP (x, 0), 4));
1722 fputc (']', file);
1724 else
1725 output_operand_lossage ("invalid operand to %%R code");
1726 return;
1727 case 'S' :
1728 if ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
1729 || GET_CODE (x) == LABEL_REF)
1731 fprintf (file, "%%st(");
1732 output_addr_const (file, x);
1733 fprintf (file, ")");
1734 return;
1736 break;
1737 case 'H' :
1738 case 'L' :
1739 if (GET_CODE (x) == REG)
1741 /* L = least significant word, H = most significant word */
1742 if ((TARGET_BIG_ENDIAN != 0) ^ (code == 'L'))
1743 fputs (reg_names[REGNO (x)], file);
1744 else
1745 fputs (reg_names[REGNO (x)+1], file);
1747 else if (GET_CODE (x) == CONST_INT
1748 || GET_CODE (x) == CONST_DOUBLE)
1750 rtx first, second;
1752 split_double (x, &first, &second);
1753 fprintf (file, "0x%08lx",
1754 (long)(code == 'L' ? INTVAL (first) : INTVAL (second)));
1756 else
1757 output_operand_lossage ("invalid operand to %%H/%%L code");
1758 return;
1759 case 'A' :
1761 char str[30];
1763 gcc_assert (GET_CODE (x) == CONST_DOUBLE
1764 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT);
1766 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
1767 fprintf (file, "%s", str);
1768 return;
1770 case 'U' :
1771 /* Output a load/store with update indicator if appropriate. */
1772 if (GET_CODE (x) == MEM)
1774 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1775 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1776 fputs (".a", file);
1778 else
1779 output_operand_lossage ("invalid operand to %%U code");
1780 return;
1781 case 'V' :
1782 /* Output cache bypass indicator for a load/store insn. Volatile memory
1783 refs are defined to use the cache bypass mechanism. */
1784 if (GET_CODE (x) == MEM)
1786 if (MEM_VOLATILE_P (x))
1787 fputs (".di", file);
1789 else
1790 output_operand_lossage ("invalid operand to %%V code");
1791 return;
1792 case 0 :
1793 /* Do nothing special. */
1794 break;
1795 default :
1796 /* Unknown flag. */
1797 output_operand_lossage ("invalid operand output code");
1800 switch (GET_CODE (x))
1802 case REG :
1803 fputs (reg_names[REGNO (x)], file);
1804 break;
1805 case MEM :
1806 fputc ('[', file);
1807 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
1808 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1809 GET_MODE_SIZE (GET_MODE (x))));
1810 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
1811 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1812 - GET_MODE_SIZE (GET_MODE (x))));
1813 else
1814 output_address (XEXP (x, 0));
1815 fputc (']', file);
1816 break;
1817 case CONST_DOUBLE :
1818 /* We handle SFmode constants here as output_addr_const doesn't. */
1819 if (GET_MODE (x) == SFmode)
1821 REAL_VALUE_TYPE d;
1822 long l;
1824 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1825 REAL_VALUE_TO_TARGET_SINGLE (d, l);
1826 fprintf (file, "0x%08lx", l);
1827 break;
1829 /* Fall through. Let output_addr_const deal with it. */
1830 default :
1831 output_addr_const (file, x);
1832 break;
1836 /* Print a memory address as an operand to reference that memory location. */
1838 void
1839 arc_print_operand_address (FILE *file, rtx addr)
1841 register rtx base, index = 0;
1842 int offset = 0;
1844 switch (GET_CODE (addr))
1846 case REG :
1847 fputs (reg_names[REGNO (addr)], file);
1848 break;
1849 case SYMBOL_REF :
1850 if (/*???*/ 0 && SYMBOL_REF_FUNCTION_P (addr))
1852 fprintf (file, "%%st(");
1853 output_addr_const (file, addr);
1854 fprintf (file, ")");
1856 else
1857 output_addr_const (file, addr);
1858 break;
1859 case PLUS :
1860 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
1861 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
1862 else if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
1863 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
1864 else
1865 base = XEXP (addr, 0), index = XEXP (addr, 1);
1866 gcc_assert (GET_CODE (base) == REG);
1867 fputs (reg_names[REGNO (base)], file);
1868 if (index == 0)
1870 if (offset != 0)
1871 fprintf (file, ",%d", offset);
1873 else
1875 switch (GET_CODE (index))
1877 case REG:
1878 fprintf (file, ",%s", reg_names[REGNO (index)]);
1879 break;
1880 case SYMBOL_REF:
1881 fputc (',', file), output_addr_const (file, index);
1882 break;
1883 default:
1884 gcc_unreachable ();
1887 break;
1888 case PRE_INC :
1889 case PRE_DEC :
1890 /* We shouldn't get here as we've lost the mode of the memory object
 1891      (which says how much to inc/dec by).  */
1892 gcc_unreachable ();
1893 break;
1894 default :
1895 output_addr_const (file, addr);
1896 break;
1900 /* Update compare/branch separation marker. */
1902 static void
1903 record_cc_ref (rtx insn)
1905 last_insn_set_cc_p = current_insn_set_cc_p;
1907 switch (get_attr_cond (insn))
1909 case COND_SET :
1910 case COND_SET_ZN :
1911 case COND_SET_ZNC :
1912 if (get_attr_length (insn) == 1)
1913 current_insn_set_cc_p = 1;
1914 else
1915 current_insn_set_cc_p = 0;
1916 break;
1917 default :
1918 current_insn_set_cc_p = 0;
1919 break;
1923 /* Conditional execution support.
1925 This is based on the ARM port but for now is much simpler.
1927 A finite state machine takes care of noticing whether or not instructions
1928 can be conditionally executed, and thus decrease execution time and code
1929 size by deleting branch instructions. The fsm is controlled by
1930 final_prescan_insn, and controls the actions of PRINT_OPERAND. The patterns
1931 in the .md file for the branch insns also have a hand in this. */
1933 /* The states of the fsm controlling condition codes are:
1934 0: normal, do nothing special
1935 1: don't output this insn
1936 2: don't output this insn
1937 3: make insns conditional
1938 4: make insns conditional
1940 State transitions (state->state by whom, under what condition):
1941 0 -> 1 final_prescan_insn, if insn is conditional branch
1942 0 -> 2 final_prescan_insn, if the `target' is an unconditional branch
1943 1 -> 3 branch patterns, after having not output the conditional branch
1944 2 -> 4 branch patterns, after having not output the conditional branch
1945 3 -> 0 (*targetm.asm_out.internal_label), if the `target' label is reached
1946 (the target label has CODE_LABEL_NUMBER equal to
1947 arc_ccfsm_target_label).
1948 4 -> 0 final_prescan_insn, if `target' unconditional branch is reached
1950 If the jump clobbers the conditions then we use states 2 and 4.
1952 A similar thing can be done with conditional return insns.
1954 We also handle separating branches from sets of the condition code.
1955 This is done here because knowledge of the ccfsm state is required:
1956 we may not be outputting the branch. */
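/* As an illustrative sketch (pseudo-assembly; the exact mnemonics and
   condition suffixes are not meant to be authoritative), consider a
   branch that skips a single insn:
       cmp r0,0
       bne .L1
       sub r2,r2,1
   .L1:
   Once the fsm has suppressed the branch (state 0 -> 1 -> 3), the
   skipped insn is printed predicated on the inverse of the branch
   condition and the branch and label are gone, giving roughly:
       cmp r0,0
       sub.eq r2,r2,1  */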
1958 void
1959 arc_final_prescan_insn (rtx insn,
1960 rtx *opvec ATTRIBUTE_UNUSED,
1961 int noperands ATTRIBUTE_UNUSED)
1963 /* BODY will hold the body of INSN. */
1964 register rtx body = PATTERN (insn);
1966 /* This will be 1 if we are trying to repeat the trick (i.e. do the `else' part of
1967 an if/then/else), in which case things need to be reversed. */
1968 int reverse = 0;
1970 /* If we start with a return insn, we only succeed if we find another one. */
1971 int seeking_return = 0;
1973 /* START_INSN will hold the insn from where we start looking. This is the
1974 first insn after the following code_label if REVERSE is true. */
1975 rtx start_insn = insn;
1977 /* Update compare/branch separation marker. */
1978 record_cc_ref (insn);
1980 /* Allow -mdebug-ccfsm to turn this off so we can see how well it does.
1981 We can't do this in the FINAL_PRESCAN_INSN macro because it's called from
1982 final_scan_insn which has `optimize' as a local. */
1983 if (optimize < 2 || TARGET_NO_COND_EXEC)
1984 return;
1986 /* If in state 4, check if the target branch is reached, in order to
1987 change back to state 0. */
1988 if (arc_ccfsm_state == 4)
1990 if (insn == arc_ccfsm_target_insn)
1992 arc_ccfsm_target_insn = NULL;
1993 arc_ccfsm_state = 0;
1995 return;
1998 /* If in state 3, it is possible to repeat the trick, if this insn is an
1999 unconditional branch to a label, and immediately following this branch
2000 is the previous target label which is only used once, and the label this
2001 branch jumps to is not too far off. Or in other words "we've done the
2002 `then' part, see if we can do the `else' part."  */
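/* A sketch of the shape looked for here (pseudo-assembly):
       bne .L_else        ; conditional branch, already suppressed (state 3)
       <then insns>       ; currently being conditionalized
       b .L_done          ; the current unconditional branch
   .L_else:               ; the previous target label, used only once
       <else insns>
   .L_done:
   If the else arm is short enough, the unconditional branch is suppressed
   as well and the else insns are predicated with the reversed condition.  */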
2003 if (arc_ccfsm_state == 3)
2005 if (simplejump_p (insn))
2007 start_insn = next_nonnote_insn (start_insn);
2008 if (GET_CODE (start_insn) == BARRIER)
2010 /* ??? Isn't this always a barrier? */
2011 start_insn = next_nonnote_insn (start_insn);
2013 if (GET_CODE (start_insn) == CODE_LABEL
2014 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2015 && LABEL_NUSES (start_insn) == 1)
2016 reverse = TRUE;
2017 else
2018 return;
2020 else if (GET_CODE (body) == RETURN)
2022 start_insn = next_nonnote_insn (start_insn);
2023 if (GET_CODE (start_insn) == BARRIER)
2024 start_insn = next_nonnote_insn (start_insn);
2025 if (GET_CODE (start_insn) == CODE_LABEL
2026 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2027 && LABEL_NUSES (start_insn) == 1)
2029 reverse = TRUE;
2030 seeking_return = 1;
2032 else
2033 return;
2035 else
2036 return;
2039 if (GET_CODE (insn) != JUMP_INSN)
2040 return;
2042 /* This jump might be in a PARALLEL with a clobber of the condition codes;
2043 the jump should always come first. */
2044 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
2045 body = XVECEXP (body, 0, 0);
2047 if (reverse
2048 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
2049 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
2051 int insns_skipped = 0, fail = FALSE, succeed = FALSE;
2052 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
2053 int then_not_else = TRUE;
2054 /* Nonzero if next insn must be the target label. */
2055 int next_must_be_target_label_p;
2056 rtx this_insn = start_insn, label = 0;
2058 /* Register the insn jumped to. */
2059 if (reverse)
2061 if (!seeking_return)
2062 label = XEXP (SET_SRC (body), 0);
2064 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
2065 label = XEXP (XEXP (SET_SRC (body), 1), 0);
2066 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
2068 label = XEXP (XEXP (SET_SRC (body), 2), 0);
2069 then_not_else = FALSE;
2071 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
2072 seeking_return = 1;
2073 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
2075 seeking_return = 1;
2076 then_not_else = FALSE;
2078 else
2079 gcc_unreachable ();
2081 /* See how many insns this branch skips, and what kind of insns. If all
2082 insns are okay, and the label or unconditional branch to the same
2083 label is not too far away, succeed. */
2084 for (insns_skipped = 0, next_must_be_target_label_p = FALSE;
2085 !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
2086 insns_skipped++)
2088 rtx scanbody;
2090 this_insn = next_nonnote_insn (this_insn);
2091 if (!this_insn)
2092 break;
2094 if (next_must_be_target_label_p)
2096 if (GET_CODE (this_insn) == BARRIER)
2097 continue;
2098 if (GET_CODE (this_insn) == CODE_LABEL
2099 && this_insn == label)
2101 arc_ccfsm_state = 1;
2102 succeed = TRUE;
2104 else
2105 fail = TRUE;
2106 break;
2109 scanbody = PATTERN (this_insn);
2111 switch (GET_CODE (this_insn))
2113 case CODE_LABEL:
2114 /* Succeed if it is the target label, otherwise fail since
2115 control falls in from somewhere else. */
2116 if (this_insn == label)
2118 arc_ccfsm_state = 1;
2119 succeed = TRUE;
2121 else
2122 fail = TRUE;
2123 break;
2125 case BARRIER:
2126 /* Succeed if the following insn is the target label.
2127 Otherwise fail.
2128 If return insns are used then the last insn in a function
2129 will be a barrier. */
2130 next_must_be_target_label_p = TRUE;
2131 break;
2133 case CALL_INSN:
2134 /* We can handle a call insn if there are no insns after it,
2135 i.e. the next "insn" is the target label.  We don't have to
2136 worry about delay slots, as such insns are SEQUENCEs inside
2137 INSNs.  ??? It is possible to handle such insns though. */
2138 if (get_attr_cond (this_insn) == COND_CANUSE)
2139 next_must_be_target_label_p = TRUE;
2140 else
2141 fail = TRUE;
2142 break;
2144 case JUMP_INSN:
2145 /* If this is an unconditional branch to the same label, succeed.
2146 If it is to another label, do nothing. If it is conditional,
2147 fail. */
2148 /* ??? Probably, the tests for the SET and the PC are unnecessary. */
2150 if (GET_CODE (scanbody) == SET
2151 && GET_CODE (SET_DEST (scanbody)) == PC)
2153 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
2154 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
2156 arc_ccfsm_state = 2;
2157 succeed = TRUE;
2159 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
2160 fail = TRUE;
2162 else if (GET_CODE (scanbody) == RETURN
2163 && seeking_return)
2165 arc_ccfsm_state = 2;
2166 succeed = TRUE;
2168 else if (GET_CODE (scanbody) == PARALLEL)
2170 if (get_attr_cond (this_insn) != COND_CANUSE)
2171 fail = TRUE;
2173 break;
2175 case INSN:
2176 /* We can only do this with insns that can use the condition
2177 codes (and don't set them). */
2178 if (GET_CODE (scanbody) == SET
2179 || GET_CODE (scanbody) == PARALLEL)
2181 if (get_attr_cond (this_insn) != COND_CANUSE)
2182 fail = TRUE;
2184 /* We can't handle other insns like sequences. */
2185 else
2186 fail = TRUE;
2187 break;
2189 default:
2190 break;
2194 if (succeed)
2196 if ((!seeking_return) && (arc_ccfsm_state == 1 || reverse))
2197 arc_ccfsm_target_label = CODE_LABEL_NUMBER (label);
2198 else
2200 gcc_assert (seeking_return || arc_ccfsm_state == 2);
2201 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
2203 this_insn = next_nonnote_insn (this_insn);
2204 gcc_assert (!this_insn
2205 || (GET_CODE (this_insn) != BARRIER
2206 && GET_CODE (this_insn) != CODE_LABEL));
2208 if (!this_insn)
2210 /* Oh dear! We ran off the end; give up. */
2211 extract_insn_cached (insn);
2212 arc_ccfsm_state = 0;
2213 arc_ccfsm_target_insn = NULL;
2214 return;
2216 arc_ccfsm_target_insn = this_insn;
2219 /* If REVERSE is true, ARC_CCFSM_CURRENT_CC needs to be inverted from
2220 what it was. */
2221 if (!reverse)
2222 arc_ccfsm_current_cc = get_arc_condition_code (XEXP (SET_SRC (body),
2223 0));
2225 if (reverse || then_not_else)
2226 arc_ccfsm_current_cc = ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc);
2229 /* Restore recog_data. Getting the attributes of other insns can
2230 destroy this array, but final.c assumes that it remains intact
2231 across this call. */
2232 extract_insn_cached (insn);
2236 /* Record that we are currently outputting label NUM with prefix PREFIX.
2237 If it's the label we're looking for, reset the ccfsm machinery.
2239 Called from (*targetm.asm_out.internal_label). */
2241 void
2242 arc_ccfsm_at_label (const char *prefix, int num)
2244 if (arc_ccfsm_state == 3 && arc_ccfsm_target_label == num
2245 && !strcmp (prefix, "L"))
2247 arc_ccfsm_state = 0;
2248 arc_ccfsm_target_insn = NULL_RTX;
2252 /* See if the current insn, which is a conditional branch, is to be
2253 deleted. */
2255 int
2256 arc_ccfsm_branch_deleted_p (void)
2258 if (arc_ccfsm_state == 1 || arc_ccfsm_state == 2)
2259 return 1;
2260 return 0;
2263 /* Record that a branch isn't output because subsequent insns can be
2264 conditionalized. */
2266 void
2267 arc_ccfsm_record_branch_deleted (void)
2269 /* Indicate we're conditionalizing insns now (state 1 becomes 3, or 2 becomes 4). */
2270 arc_ccfsm_state += 2;
2272 /* If the next insn is a subroutine call, we still need a nop between the
2273 cc setter and user. We need to undo the effect of calling record_cc_ref
2274 for the just deleted branch. */
2275 current_insn_set_cc_p = last_insn_set_cc_p;
2278 void
2279 arc_va_start (tree valist, rtx nextarg)
2281 /* See arc_setup_incoming_varargs for reasons for this oddity. */
2282 if (current_function_args_info < 8
2283 && (current_function_args_info & 1))
2284 nextarg = plus_constant (nextarg, UNITS_PER_WORD);
2286 std_expand_builtin_va_start (valist, nextarg);
2289 /* This is how to output a definition of an internal numbered label where
2290 PREFIX is the class of label and NUM is the number within the class. */
2292 static void
2293 arc_internal_label (FILE *stream, const char *prefix, unsigned long labelno)
2295 arc_ccfsm_at_label (prefix, labelno);
2296 default_internal_label (stream, prefix, labelno);
2299 /* Worker function for TARGET_ASM_EXTERNAL_LIBCALL. */
2301 static void
2302 arc_external_libcall (rtx fun ATTRIBUTE_UNUSED)
2304 #if 0
2305 /* On the ARC we want to have libgccs for multiple cpus in one binary.
2306 We can't use `assemble_name' here as that will call ASM_OUTPUT_LABELREF
2307 and we'll get another suffix added on if -mmangle-cpu. */
2308 if (TARGET_MANGLE_CPU_LIBGCC)
2310 fprintf (FILE, "\t.rename\t_%s, _%s%s\n",
2311 XSTR (SYMREF, 0), XSTR (SYMREF, 0),
2312 arc_mangle_suffix);
2314 #endif
2317 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2319 static bool
2320 arc_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
2322 if (AGGREGATE_TYPE_P (type))
2323 return true;
2324 else
2326 HOST_WIDE_INT size = int_size_in_bytes (type);
2327 return (size == -1 || size > 8);
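/* Under this rule, scalar values of up to 8 bytes (e.g. DImode or DFmode)
   come back in registers, while any aggregate, no matter how small, and
   anything larger than 8 bytes is returned in memory.  */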
2331 /* For ARC, all aggregates and arguments greater than 8 bytes are
2332 passed by reference. */
2334 static bool
2335 arc_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
2336 enum machine_mode mode, tree type,
2337 bool named ATTRIBUTE_UNUSED)
2339 unsigned HOST_WIDE_INT size;
2341 if (type)
2343 if (AGGREGATE_TYPE_P (type))
2344 return true;
2345 size = int_size_in_bytes (type);
2347 else
2348 size = GET_MODE_SIZE (mode);
2350 return size > 8;
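/* So, for example, a DFmode argument (8 bytes) is still passed by value,
   whereas any aggregate, or anything wider than 8 bytes, goes by
   reference.  */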