1 /* Subroutines used for code generation on the Argonaut ARC cpu.
2 Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* ??? This is an old port, and is undoubtedly suffering from bit rot. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "real.h"
33 #include "insn-config.h"
34 #include "conditions.h"
35 #include "output.h"
36 #include "insn-attr.h"
37 #include "flags.h"
38 #include "function.h"
39 #include "expr.h"
40 #include "recog.h"
41 #include "toplev.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "target-def.h"
 46 /* Which cpu we're compiling for (NULL means the base cpu, otherwise an extension cpu name). */
47 const char *arc_cpu_string;
48 int arc_cpu_type;
50 /* Name of mangle string to add to symbols to separate code compiled for each
51 cpu (or NULL). */
52 const char *arc_mangle_cpu;
54 /* Save the operands last given to a compare for use when we
55 generate a scc or bcc insn. */
56 rtx arc_compare_op0, arc_compare_op1;
58 /* Name of text, data, and rodata sections, as specified on command line.
59 Selected by -m{text,data,rodata} flags. */
60 const char *arc_text_string = ARC_DEFAULT_TEXT_SECTION;
61 const char *arc_data_string = ARC_DEFAULT_DATA_SECTION;
62 const char *arc_rodata_string = ARC_DEFAULT_RODATA_SECTION;
64 /* Name of text, data, and rodata sections used in varasm.c. */
65 const char *arc_text_section;
66 const char *arc_data_section;
67 const char *arc_rodata_section;
69 /* Array of valid operand punctuation characters. */
70 char arc_punct_chars[256];
72 /* Variables used by arc_final_prescan_insn to implement conditional
73 execution. */
74 static int arc_ccfsm_state;
75 static int arc_ccfsm_current_cc;
76 static rtx arc_ccfsm_target_insn;
77 static int arc_ccfsm_target_label;
79 /* The maximum number of insns skipped which will be conditionalised if
80 possible. */
81 #define MAX_INSNS_SKIPPED 3
83 /* A nop is needed between a 4 byte insn that sets the condition codes and
84 a branch that uses them (the same isn't true for an 8 byte insn that sets
85 the condition codes). Set by arc_final_prescan_insn. Used by
86 arc_print_operand. */
87 static int last_insn_set_cc_p;
88 static int current_insn_set_cc_p;
89 static void record_cc_ref (rtx);
90 static void arc_init_reg_tables (void);
91 static int get_arc_condition_code (rtx);
92 const struct attribute_spec arc_attribute_table[];
93 static tree arc_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
94 static bool arc_assemble_integer (rtx, unsigned int, int);
95 static void arc_output_function_prologue (FILE *, HOST_WIDE_INT);
96 static void arc_output_function_epilogue (FILE *, HOST_WIDE_INT);
97 static void arc_file_start (void);
98 static void arc_internal_label (FILE *, const char *, unsigned long);
99 static void arc_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
100 tree, int *, int);
101 static bool arc_rtx_costs (rtx, int, int, int *);
102 static int arc_address_cost (rtx);
103 static void arc_external_libcall (rtx);
104 static bool arc_return_in_memory (tree, tree);
105 static tree arc_gimplify_va_arg_expr (tree, tree, tree *, tree *);
107 /* Initialize the GCC target structure. */
108 #undef TARGET_ASM_ALIGNED_HI_OP
109 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
110 #undef TARGET_ASM_ALIGNED_SI_OP
111 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
112 #undef TARGET_ASM_INTEGER
113 #define TARGET_ASM_INTEGER arc_assemble_integer
115 #undef TARGET_ASM_FUNCTION_PROLOGUE
116 #define TARGET_ASM_FUNCTION_PROLOGUE arc_output_function_prologue
117 #undef TARGET_ASM_FUNCTION_EPILOGUE
118 #define TARGET_ASM_FUNCTION_EPILOGUE arc_output_function_epilogue
119 #undef TARGET_ASM_FILE_START
120 #define TARGET_ASM_FILE_START arc_file_start
121 #undef TARGET_ATTRIBUTE_TABLE
122 #define TARGET_ATTRIBUTE_TABLE arc_attribute_table
123 #undef TARGET_ASM_INTERNAL_LABEL
124 #define TARGET_ASM_INTERNAL_LABEL arc_internal_label
125 #undef TARGET_ASM_EXTERNAL_LIBCALL
126 #define TARGET_ASM_EXTERNAL_LIBCALL arc_external_libcall
128 #undef TARGET_RTX_COSTS
129 #define TARGET_RTX_COSTS arc_rtx_costs
130 #undef TARGET_ADDRESS_COST
131 #define TARGET_ADDRESS_COST arc_address_cost
133 #undef TARGET_PROMOTE_FUNCTION_ARGS
134 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
135 #undef TARGET_PROMOTE_FUNCTION_RETURN
136 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
137 #undef TARGET_PROMOTE_PROTOTYPES
138 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
140 #undef TARGET_RETURN_IN_MEMORY
141 #define TARGET_RETURN_IN_MEMORY arc_return_in_memory
143 #undef TARGET_SETUP_INCOMING_VARARGS
144 #define TARGET_SETUP_INCOMING_VARARGS arc_setup_incoming_varargs
145 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
146 #define TARGET_GIMPLIFY_VA_ARG_EXPR arc_gimplify_va_arg_expr
148 struct gcc_target targetm = TARGET_INITIALIZER;
150 /* Called by OVERRIDE_OPTIONS to initialize various things. */
152 void
153 arc_init (void)
155 char *tmp;
157 if (arc_cpu_string == 0
158 || !strcmp (arc_cpu_string, "base"))
160 /* Ensure we have a printable value for the .cpu pseudo-op. */
161 arc_cpu_string = "base";
162 arc_cpu_type = 0;
163 arc_mangle_cpu = NULL;
165 else if (ARC_EXTENSION_CPU (arc_cpu_string))
166 ; /* nothing to do */
167 else
169 error ("bad value (%s) for -mcpu switch", arc_cpu_string);
170 arc_cpu_string = "base";
171 arc_cpu_type = 0;
172 arc_mangle_cpu = NULL;
175 /* Set the pseudo-ops for the various standard sections. */
176 arc_text_section = tmp = xmalloc (strlen (arc_text_string) + sizeof (ARC_SECTION_FORMAT) + 1);
177 sprintf (tmp, ARC_SECTION_FORMAT, arc_text_string);
178 arc_data_section = tmp = xmalloc (strlen (arc_data_string) + sizeof (ARC_SECTION_FORMAT) + 1);
179 sprintf (tmp, ARC_SECTION_FORMAT, arc_data_string);
180 arc_rodata_section = tmp = xmalloc (strlen (arc_rodata_string) + sizeof (ARC_SECTION_FORMAT) + 1);
181 sprintf (tmp, ARC_SECTION_FORMAT, arc_rodata_string);
183 arc_init_reg_tables ();
185 /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P. */
186 memset (arc_punct_chars, 0, sizeof (arc_punct_chars));
187 arc_punct_chars['#'] = 1;
188 arc_punct_chars['*'] = 1;
189 arc_punct_chars['?'] = 1;
190 arc_punct_chars['!'] = 1;
191 arc_punct_chars['~'] = 1;
194 /* The condition codes of the ARC, and the inverse function. */
195 static const char *const arc_condition_codes[] =
197 "al", 0, "eq", "ne", "p", "n", "c", "nc", "v", "nv",
198 "gt", "le", "ge", "lt", "hi", "ls", "pnz", 0
201 #define ARC_INVERSE_CONDITION_CODE(X) ((X) ^ 1)
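/* Editorial illustration (not part of the original source): the table above
   is laid out so that each condition sits next to its logical inverse, which
   is why flipping the low bit inverts a condition.  For example,
   get_arc_condition_code (EQ) is 2 ("eq") and 2 ^ 1 is 3 ("ne"); GT (10,
   "gt") pairs with LE (11, "le"); LTU (6, "c") pairs with GEU (7, "nc").  */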
203 /* Returns the index of the ARC condition code string in
204 `arc_condition_codes'. COMPARISON should be an rtx like
205 `(eq (...) (...))'. */
207 static int
208 get_arc_condition_code (rtx comparison)
210 switch (GET_CODE (comparison))
212 case EQ : return 2;
213 case NE : return 3;
214 case GT : return 10;
215 case LE : return 11;
216 case GE : return 12;
217 case LT : return 13;
218 case GTU : return 14;
219 case LEU : return 15;
220 case LTU : return 6;
221 case GEU : return 7;
222 default : abort ();
224 /*NOTREACHED*/
225 return (42);
228 /* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
229 return the mode to be used for the comparison. */
231 enum machine_mode
232 arc_select_cc_mode (enum rtx_code op,
233 rtx x ATTRIBUTE_UNUSED,
234 rtx y ATTRIBUTE_UNUSED)
236 switch (op)
238 case EQ :
239 case NE :
240 return CCZNmode;
241 default :
242 switch (GET_CODE (x))
244 case AND :
245 case IOR :
246 case XOR :
247 case SIGN_EXTEND :
248 case ZERO_EXTEND :
249 return CCZNmode;
250 case ASHIFT :
251 case ASHIFTRT :
252 case LSHIFTRT :
253 return CCZNCmode;
254 default:
255 break;
258 return CCmode;
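/* Illustrative examples (editorial addition): an EQ or NE test always gets
   CCZNmode, e.g. (eq (reg) (const_int 0)).  A non-equality test whose first
   operand is a logical or extension operation, such as
   (lt (and (reg) (reg)) (const_int 0)), only needs the Z/N bits and also
   gets CCZNmode.  An unsigned test of a shift result, e.g.
   (ltu (lshiftrt (reg) (const_int 1)) (const_int 0)), needs the carry bit
   as well and gets CCZNCmode.  A plain signed compare of two registers
   falls back to CCmode.  */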
261 /* Vectors to keep interesting information about registers where it can easily
 262 be got.  We used to use the actual mode value as the bit number, but there
263 is (or may be) more than 32 modes now. Instead we use two tables: one
264 indexed by hard register number, and one indexed by mode. */
266 /* The purpose of arc_mode_class is to shrink the range of modes so that
267 they all fit (as bit numbers) in a 32 bit word (again). Each real mode is
268 mapped into one arc_mode_class mode. */
270 enum arc_mode_class {
271 C_MODE,
272 S_MODE, D_MODE, T_MODE, O_MODE,
273 SF_MODE, DF_MODE, TF_MODE, OF_MODE
276 /* Modes for condition codes. */
277 #define C_MODES (1 << (int) C_MODE)
279 /* Modes for single-word and smaller quantities. */
280 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
282 /* Modes for double-word and smaller quantities. */
 283 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << (int) DF_MODE))
285 /* Modes for quad-word and smaller quantities. */
286 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
288 /* Value is 1 if register/mode pair is acceptable on arc. */
290 const unsigned int arc_hard_regno_mode_ok[] = {
291 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
292 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
293 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, D_MODES,
294 D_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
296 /* ??? Leave these as S_MODES for now. */
297 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
298 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
299 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
300 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, C_MODES
303 unsigned int arc_mode_class [NUM_MACHINE_MODES];
305 enum reg_class arc_regno_reg_class[FIRST_PSEUDO_REGISTER];
307 static void
308 arc_init_reg_tables (void)
310 int i;
312 for (i = 0; i < NUM_MACHINE_MODES; i++)
314 switch (GET_MODE_CLASS (i))
316 case MODE_INT:
317 case MODE_PARTIAL_INT:
318 case MODE_COMPLEX_INT:
319 if (GET_MODE_SIZE (i) <= 4)
320 arc_mode_class[i] = 1 << (int) S_MODE;
321 else if (GET_MODE_SIZE (i) == 8)
322 arc_mode_class[i] = 1 << (int) D_MODE;
323 else if (GET_MODE_SIZE (i) == 16)
324 arc_mode_class[i] = 1 << (int) T_MODE;
325 else if (GET_MODE_SIZE (i) == 32)
326 arc_mode_class[i] = 1 << (int) O_MODE;
327 else
328 arc_mode_class[i] = 0;
329 break;
330 case MODE_FLOAT:
331 case MODE_COMPLEX_FLOAT:
332 if (GET_MODE_SIZE (i) <= 4)
333 arc_mode_class[i] = 1 << (int) SF_MODE;
334 else if (GET_MODE_SIZE (i) == 8)
335 arc_mode_class[i] = 1 << (int) DF_MODE;
336 else if (GET_MODE_SIZE (i) == 16)
337 arc_mode_class[i] = 1 << (int) TF_MODE;
338 else if (GET_MODE_SIZE (i) == 32)
339 arc_mode_class[i] = 1 << (int) OF_MODE;
340 else
341 arc_mode_class[i] = 0;
342 break;
343 case MODE_CC:
344 arc_mode_class[i] = 1 << (int) C_MODE;
345 break;
346 default:
347 arc_mode_class[i] = 0;
348 break;
352 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
354 if (i < 60)
355 arc_regno_reg_class[i] = GENERAL_REGS;
356 else if (i == 60)
357 arc_regno_reg_class[i] = LPCOUNT_REG;
358 else if (i == 61)
359 arc_regno_reg_class[i] = NO_REGS /* CC_REG: must be NO_REGS */;
360 else
361 arc_regno_reg_class[i] = NO_REGS;
365 /* ARC specific attribute support.
367 The ARC has these attributes:
368 interrupt - for interrupt functions
371 const struct attribute_spec arc_attribute_table[] =
373 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
374 { "interrupt", 1, 1, true, false, false, arc_handle_interrupt_attribute },
375 { NULL, 0, 0, false, false, false, NULL }
378 /* Handle an "interrupt" attribute; arguments as in
379 struct attribute_spec.handler. */
380 static tree
381 arc_handle_interrupt_attribute (tree *node ATTRIBUTE_UNUSED,
382 tree name,
383 tree args,
384 int flags ATTRIBUTE_UNUSED,
385 bool *no_add_attrs)
387 tree value = TREE_VALUE (args);
389 if (TREE_CODE (value) != STRING_CST)
391 warning ("argument of `%s' attribute is not a string constant",
392 IDENTIFIER_POINTER (name));
393 *no_add_attrs = true;
395 else if (strcmp (TREE_STRING_POINTER (value), "ilink1")
396 && strcmp (TREE_STRING_POINTER (value), "ilink2"))
398 warning ("argument of `%s' attribute is not \"ilink1\" or \"ilink2\"",
399 IDENTIFIER_POINTER (name));
400 *no_add_attrs = true;
403 return NULL_TREE;
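/* Illustrative usage (editorial addition, not part of this file): a handler
   tied to the level-1 interrupt link register would be declared as

       void my_handler (void) __attribute__ ((interrupt ("ilink1")));

   where `my_handler' is just an example name.  Any argument other than
   "ilink1" or "ilink2" draws one of the warnings above and the attribute
   is dropped.  */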
407 /* Acceptable arguments to the call insn. */
410 call_address_operand (rtx op, enum machine_mode mode)
412 return (symbolic_operand (op, mode)
413 || (GET_CODE (op) == CONST_INT && LEGITIMATE_CONSTANT_P (op))
414 || (GET_CODE (op) == REG));
418 call_operand (rtx op, enum machine_mode mode)
420 if (GET_CODE (op) != MEM)
421 return 0;
422 op = XEXP (op, 0);
423 return call_address_operand (op, mode);
426 /* Returns 1 if OP is a symbol reference. */
429 symbolic_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
431 switch (GET_CODE (op))
433 case SYMBOL_REF:
434 case LABEL_REF:
435 case CONST :
436 return 1;
437 default:
438 return 0;
442 /* Return truth value of statement that OP is a symbolic memory
443 operand of mode MODE. */
446 symbolic_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
448 if (GET_CODE (op) == SUBREG)
449 op = SUBREG_REG (op);
450 if (GET_CODE (op) != MEM)
451 return 0;
452 op = XEXP (op, 0);
453 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST
454 || GET_CODE (op) == LABEL_REF);
457 /* Return true if OP is a short immediate (shimm) value. */
460 short_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
462 if (GET_CODE (op) != CONST_INT)
463 return 0;
464 return SMALL_INT (INTVAL (op));
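/* Worked example (editorial addition): SMALL_INT is defined in arc.h;
   assuming the usual 9 bit signed shimm encoding, i.e.
   ((unsigned) ((X) + 0x100) < 0x200), values from -256 to 255 qualify.
   So 255 and -256 are shimm operands, while 256 or -257 need a limm.  */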
467 /* Return true if OP will require a long immediate (limm) value.
468 This is currently only used when calculating length attributes. */
471 long_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
473 switch (GET_CODE (op))
475 case SYMBOL_REF :
476 case LABEL_REF :
477 case CONST :
478 return 1;
479 case CONST_INT :
480 return !SMALL_INT (INTVAL (op));
481 case CONST_DOUBLE :
482 /* These can happen because large unsigned 32 bit constants are
483 represented this way (the multiplication patterns can cause these
484 to be generated). They also occur for SFmode values. */
485 return 1;
486 default:
487 break;
489 return 0;
492 /* Return true if OP is a MEM that when used as a load or store address will
493 require an 8 byte insn.
494 Load and store instructions don't allow the same possibilities but they're
495 similar enough that this one function will do.
496 This is currently only used when calculating length attributes. */
499 long_immediate_loadstore_operand (rtx op,
500 enum machine_mode mode ATTRIBUTE_UNUSED)
502 if (GET_CODE (op) != MEM)
503 return 0;
505 op = XEXP (op, 0);
506 switch (GET_CODE (op))
508 case SYMBOL_REF :
509 case LABEL_REF :
510 case CONST :
511 return 1;
512 case CONST_INT :
513 /* This must be handled as "st c,[limm]". Ditto for load.
514 Technically, the assembler could translate some possibilities to
515 "st c,[limm/2 + limm/2]" if limm/2 will fit in a shimm, but we don't
516 assume that it does. */
517 return 1;
518 case CONST_DOUBLE :
519 /* These can happen because large unsigned 32 bit constants are
520 represented this way (the multiplication patterns can cause these
521 to be generated). They also occur for SFmode values. */
522 return 1;
523 case REG :
524 return 0;
525 case PLUS :
526 if (GET_CODE (XEXP (op, 1)) == CONST_INT
527 && !SMALL_INT (INTVAL (XEXP (op, 1))))
528 return 1;
529 return 0;
530 default:
531 break;
533 return 0;
536 /* Return true if OP is an acceptable argument for a single word
537 move source. */
540 move_src_operand (rtx op, enum machine_mode mode)
542 switch (GET_CODE (op))
544 case SYMBOL_REF :
545 case LABEL_REF :
546 case CONST :
547 return 1;
548 case CONST_INT :
549 return (LARGE_INT (INTVAL (op)));
550 case CONST_DOUBLE :
551 /* We can handle DImode integer constants in SImode if the value
552 (signed or unsigned) will fit in 32 bits. This is needed because
553 large unsigned 32 bit constants are represented as CONST_DOUBLEs. */
554 if (mode == SImode)
555 return arc_double_limm_p (op);
556 /* We can handle 32 bit floating point constants. */
557 if (mode == SFmode)
558 return GET_MODE (op) == SFmode;
559 return 0;
560 case REG :
561 return register_operand (op, mode);
562 case SUBREG :
563 /* (subreg (mem ...) ...) can occur here if the inner part was once a
564 pseudo-reg and is now a stack slot. */
565 if (GET_CODE (SUBREG_REG (op)) == MEM)
566 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
567 else
568 return register_operand (op, mode);
569 case MEM :
570 return address_operand (XEXP (op, 0), mode);
571 default :
572 return 0;
576 /* Return true if OP is an acceptable argument for a double word
577 move source. */
580 move_double_src_operand (rtx op, enum machine_mode mode)
582 switch (GET_CODE (op))
584 case REG :
585 return register_operand (op, mode);
586 case SUBREG :
587 /* (subreg (mem ...) ...) can occur here if the inner part was once a
588 pseudo-reg and is now a stack slot. */
589 if (GET_CODE (SUBREG_REG (op)) == MEM)
590 return move_double_src_operand (SUBREG_REG (op), mode);
591 else
592 return register_operand (op, mode);
593 case MEM :
594 /* Disallow auto inc/dec for now. */
595 if (GET_CODE (XEXP (op, 0)) == PRE_DEC
596 || GET_CODE (XEXP (op, 0)) == PRE_INC)
597 return 0;
598 return address_operand (XEXP (op, 0), mode);
599 case CONST_INT :
600 case CONST_DOUBLE :
601 return 1;
602 default :
603 return 0;
607 /* Return true if OP is an acceptable argument for a move destination. */
610 move_dest_operand (rtx op, enum machine_mode mode)
612 switch (GET_CODE (op))
614 case REG :
615 return register_operand (op, mode);
616 case SUBREG :
617 /* (subreg (mem ...) ...) can occur here if the inner part was once a
618 pseudo-reg and is now a stack slot. */
619 if (GET_CODE (SUBREG_REG (op)) == MEM)
620 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
621 else
622 return register_operand (op, mode);
623 case MEM :
624 return address_operand (XEXP (op, 0), mode);
625 default :
626 return 0;
 630 /* Return true if OP is a valid load-with-update operand. */
633 load_update_operand (rtx op, enum machine_mode mode)
635 if (GET_CODE (op) != MEM
636 || GET_MODE (op) != mode)
637 return 0;
638 op = XEXP (op, 0);
639 if (GET_CODE (op) != PLUS
640 || GET_MODE (op) != Pmode
641 || !register_operand (XEXP (op, 0), Pmode)
642 || !nonmemory_operand (XEXP (op, 1), Pmode))
643 return 0;
644 return 1;
 647 /* Return true if OP is a valid store-with-update operand. */
650 store_update_operand (rtx op, enum machine_mode mode)
652 if (GET_CODE (op) != MEM
653 || GET_MODE (op) != mode)
654 return 0;
655 op = XEXP (op, 0);
656 if (GET_CODE (op) != PLUS
657 || GET_MODE (op) != Pmode
658 || !register_operand (XEXP (op, 0), Pmode)
659 || !(GET_CODE (XEXP (op, 1)) == CONST_INT
660 && SMALL_INT (INTVAL (XEXP (op, 1)))))
661 return 0;
662 return 1;
665 /* Return true if OP is a non-volatile non-immediate operand.
666 Volatile memory refs require a special "cache-bypass" instruction
667 and only the standard movXX patterns are set up to handle them. */
670 nonvol_nonimm_operand (rtx op, enum machine_mode mode)
672 if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
673 return 0;
674 return nonimmediate_operand (op, mode);
677 /* Accept integer operands in the range -0x80000000..0x7fffffff. We have
678 to check the range carefully since this predicate is used in DImode
679 contexts. */
682 const_sint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
684 /* All allowed constants will fit a CONST_INT. */
685 return (GET_CODE (op) == CONST_INT
686 && (INTVAL (op) >= (-0x7fffffff - 1) && INTVAL (op) <= 0x7fffffff));
689 /* Accept integer operands in the range 0..0xffffffff. We have to check the
690 range carefully since this predicate is used in DImode contexts. Also, we
691 need some extra crud to make it work when hosted on 64-bit machines. */
694 const_uint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
696 #if HOST_BITS_PER_WIDE_INT > 32
697 /* All allowed constants will fit a CONST_INT. */
698 return (GET_CODE (op) == CONST_INT
699 && (INTVAL (op) >= 0 && INTVAL (op) <= 0xffffffffL));
700 #else
701 return ((GET_CODE (op) == CONST_INT && INTVAL (op) >= 0)
702 || (GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_HIGH (op) == 0));
703 #endif
706 /* Return 1 if OP is a comparison operator valid for the mode of CC.
707 This allows the use of MATCH_OPERATOR to recognize all the branch insns.
709 Some insns only set a few bits in the condition code. So only allow those
710 comparisons that use the bits that are valid. */
713 proper_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
715 enum rtx_code code;
716 if (!COMPARISON_P (op))
717 return 0;
719 code = GET_CODE (op);
720 if (GET_MODE (XEXP (op, 0)) == CCZNmode)
721 return (code == EQ || code == NE);
722 if (GET_MODE (XEXP (op, 0)) == CCZNCmode)
723 return (code == EQ || code == NE
724 || code == LTU || code == GEU || code == GTU || code == LEU);
725 return 1;
728 /* Misc. utilities. */
730 /* X and Y are two things to compare using CODE. Emit the compare insn and
731 return the rtx for the cc reg in the proper mode. */
734 gen_compare_reg (enum rtx_code code, rtx x, rtx y)
736 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
737 rtx cc_reg;
739 cc_reg = gen_rtx_REG (mode, 61);
741 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
742 gen_rtx_COMPARE (mode, x, y)));
744 return cc_reg;
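/* Example (editorial addition): gen_compare_reg (LTU, a, b), with A a plain
   register, selects CCmode, emits

       (set (reg:CC 61) (compare:CC a b))

   and returns (reg:CC 61); register 61 is the condition code register (see
   arc_init_reg_tables).  A later scc or bcc pattern then tests that register
   with an ltu/geu comparison.  */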
747 /* Return 1 if VALUE, a const_double, will fit in a limm (4 byte number).
748 We assume the value can be either signed or unsigned. */
751 arc_double_limm_p (rtx value)
753 HOST_WIDE_INT low, high;
755 if (GET_CODE (value) != CONST_DOUBLE)
756 abort ();
758 low = CONST_DOUBLE_LOW (value);
759 high = CONST_DOUBLE_HIGH (value);
761 if (low & 0x80000000)
763 return (((unsigned HOST_WIDE_INT) low <= 0xffffffff && high == 0)
764 || (((low & - (unsigned HOST_WIDE_INT) 0x80000000)
765 == - (unsigned HOST_WIDE_INT) 0x80000000)
766 && high == -1));
768 else
770 return (unsigned HOST_WIDE_INT) low <= 0x7fffffff && high == 0;
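/* Worked examples (editorial addition), in terms of the low/high words the
   function examines: low = 0xffffffff with high = 0 fits (a 32 bit unsigned
   limm); a low word that is a sign-extended negative 32 bit value together
   with high = -1 fits (a negative limm); low = 0 with high = 1 (the value
   0x100000000) does not fit and must be built up in pieces.  */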
774 /* Do any needed setup for a variadic function. For the ARC, we must
775 create a register parameter block, and then copy any anonymous arguments
776 in registers to memory.
778 CUM has not been updated for the last named argument which has type TYPE
779 and mode MODE, and we rely on this fact.
781 We do things a little weird here. We're supposed to only allocate space
782 for the anonymous arguments. However we need to keep the stack eight byte
783 aligned. So we round the space up if necessary, and leave it to va_start
784 to compensate. */
786 static void
787 arc_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
788 enum machine_mode mode,
789 tree type ATTRIBUTE_UNUSED,
790 int *pretend_size,
791 int no_rtl)
793 int first_anon_arg;
795 /* All BLKmode values are passed by reference. */
796 if (mode == BLKmode)
797 abort ();
799 first_anon_arg = *cum + ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1)
800 / UNITS_PER_WORD);
802 if (first_anon_arg < MAX_ARC_PARM_REGS && !no_rtl)
804 /* Note that first_reg_offset < MAX_ARC_PARM_REGS. */
805 int first_reg_offset = first_anon_arg;
806 /* Size in words to "pretend" allocate. */
807 int size = MAX_ARC_PARM_REGS - first_reg_offset;
808 /* Extra slop to keep stack eight byte aligned. */
809 int align_slop = size & 1;
810 rtx regblock;
812 regblock = gen_rtx_MEM (BLKmode,
813 plus_constant (arg_pointer_rtx,
814 FIRST_PARM_OFFSET (0)
815 + align_slop * UNITS_PER_WORD));
816 set_mem_alias_set (regblock, get_varargs_alias_set ());
817 set_mem_align (regblock, BITS_PER_WORD);
818 move_block_from_reg (first_reg_offset, regblock,
819 MAX_ARC_PARM_REGS - first_reg_offset);
821 *pretend_size = ((MAX_ARC_PARM_REGS - first_reg_offset + align_slop)
822 * UNITS_PER_WORD);
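/* Worked example (editorial addition), assuming MAX_ARC_PARM_REGS is 8 and
   UNITS_PER_WORD is 4: for `int f (int a, int b, ...)' the last named
   argument ends in r1, so first_anon_arg is 2, size is 6 words and
   align_slop is 0; r2-r7 are dumped just above FIRST_PARM_OFFSET (0) and
   *pretend_size becomes 24.  With three named int arguments first_anon_arg
   would be 3, size 5 and align_slop 1, so the block is placed one word
   higher and the stack stays 8 byte aligned.  */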
826 /* Cost functions. */
828 /* Compute a (partial) cost for rtx X. Return true if the complete
829 cost has been computed, and false if subexpressions should be
830 scanned. In either case, *TOTAL contains the cost result. */
832 static bool
833 arc_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
835 switch (code)
837 /* Small integers are as cheap as registers. 4 byte values can
838 be fetched as immediate constants - let's give that the cost
839 of an extra insn. */
840 case CONST_INT:
841 if (SMALL_INT (INTVAL (x)))
843 *total = 0;
844 return true;
846 /* FALLTHRU */
848 case CONST:
849 case LABEL_REF:
850 case SYMBOL_REF:
851 *total = COSTS_N_INSNS (1);
852 return true;
854 case CONST_DOUBLE:
856 rtx high, low;
857 split_double (x, &high, &low);
858 *total = COSTS_N_INSNS (!SMALL_INT (INTVAL (high))
859 + !SMALL_INT (INTVAL (low)));
860 return true;
863 /* Encourage synth_mult to find a synthetic multiply when reasonable.
864 If we need more than 12 insns to do a multiply, then go out-of-line,
865 since the call overhead will be < 10% of the cost of the multiply. */
866 case ASHIFT:
867 case ASHIFTRT:
868 case LSHIFTRT:
869 if (TARGET_SHIFTER)
870 *total = COSTS_N_INSNS (1);
871 else if (GET_CODE (XEXP (x, 1)) != CONST_INT)
872 *total = COSTS_N_INSNS (16);
873 else
874 *total = COSTS_N_INSNS (INTVAL (XEXP ((x), 1)));
875 return false;
877 default:
878 return false;
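/* Examples (editorial addition): a shimm constant such as 5 costs 0; a
   constant needing a limm, or a SYMBOL_REF, costs COSTS_N_INSNS (1); a
   DImode constant needing two limms costs COSTS_N_INSNS (2); without
   TARGET_SHIFTER a shift by the constant 5 is costed as 5 insns and a
   shift by a register amount as 16, which steers synth_mult as intended.  */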
883 /* Provide the costs of an addressing mode that contains ADDR.
884 If ADDR is not a valid address, its cost is irrelevant. */
886 static int
887 arc_address_cost (rtx addr)
889 switch (GET_CODE (addr))
891 case REG :
892 return 1;
894 case LABEL_REF :
895 case SYMBOL_REF :
896 case CONST :
897 return 2;
899 case PLUS :
901 register rtx plus0 = XEXP (addr, 0);
902 register rtx plus1 = XEXP (addr, 1);
904 if (GET_CODE (plus0) != REG)
905 break;
907 switch (GET_CODE (plus1))
909 case CONST_INT :
 910 return SMALL_INT (INTVAL (plus1)) ? 1 : 2;
911 case CONST :
912 case SYMBOL_REF :
913 case LABEL_REF :
914 return 2;
915 default:
916 break;
918 break;
920 default:
921 break;
924 return 4;
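/* Examples (editorial addition): [r1] costs 1; [r1,8] costs 1 because the
   offset fits in a shimm; [r1,0x1000] costs 2 since the offset needs a
   limm; [symbol] costs 2; anything unrecognized falls through to 4.  */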
927 /* Function prologue/epilogue handlers. */
929 /* ARC stack frames look like:
 931                Before call                       After call
 932         +-----------------------+       +-----------------------+
 933         |                       |       |                       |
 934   high  |  local variables,     |       |  local variables,     |
 935   mem   |  reg save area, etc.  |       |  reg save area, etc.  |
 936         |                       |       |                       |
 937         +-----------------------+       +-----------------------+
 938         |                       |       |                       |
 939         |  arguments on stack.  |       |  arguments on stack.  |
 940         |                       |       |                       |
 941  SP+16->+-----------------------+FP+48->+-----------------------+
 942         |  4 word save area for |       |  reg parm save area,  |
 943         |  return addr, prev %fp|       |  only created for     |
 944   SP+0->+-----------------------+       |  variable argument    |
 945                                         |  functions            |
 946                                  FP+16->+-----------------------+
 947                                         |  4 word save area for |
 948                                         |  return addr, prev %fp|
 949                                   FP+0->+-----------------------+
 950                                         |                       |
 951                                         |  local variables      |
 952                                         |                       |
 953                                         +-----------------------+
 954                                         |                       |
 955                                         |  register save area   |
 956                                         |                       |
 957                                         +-----------------------+
 958                                         |                       |
 959                                         |  alloca allocations   |
 960                                         |                       |
 961                                         +-----------------------+
 962                                         |                       |
 963                                         |  arguments on stack   |
 964                                         |                       |
 965                                  SP+16->+-----------------------+
 966   low                                   |  4 word save area for |
 967   memory                                |  return addr, prev %fp|
 968                                   SP+0->+-----------------------+
970 Notes:
971 1) The "reg parm save area" does not exist for non variable argument fns.
972 The "reg parm save area" can be eliminated completely if we created our
973 own va-arc.h, but that has tradeoffs as well (so it's not done). */
975 /* Structure to be filled in by arc_compute_frame_size with register
976 save masks, and offsets for the current function. */
977 struct arc_frame_info
979 unsigned int total_size; /* # bytes that the entire frame takes up. */
980 unsigned int extra_size; /* # bytes of extra stuff. */
981 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
982 unsigned int args_size; /* # bytes that outgoing arguments take up. */
983 unsigned int reg_size; /* # bytes needed to store regs. */
984 unsigned int var_size; /* # bytes that variables take up. */
985 unsigned int reg_offset; /* Offset from new sp to store regs. */
986 unsigned int gmask; /* Mask of saved gp registers. */
987 int initialized; /* Nonzero if frame size already calculated. */
990 /* Current frame information calculated by arc_compute_frame_size. */
991 static struct arc_frame_info current_frame_info;
993 /* Zero structure to initialize current_frame_info. */
994 static struct arc_frame_info zero_frame_info;
996 /* Type of function DECL.
998 The result is cached. To reset the cache at the end of a function,
999 call with DECL = NULL_TREE. */
1001 enum arc_function_type
1002 arc_compute_function_type (tree decl)
1004 tree a;
1005 /* Cached value. */
1006 static enum arc_function_type fn_type = ARC_FUNCTION_UNKNOWN;
1007 /* Last function we were called for. */
1008 static tree last_fn = NULL_TREE;
1010 /* Resetting the cached value? */
1011 if (decl == NULL_TREE)
1013 fn_type = ARC_FUNCTION_UNKNOWN;
1014 last_fn = NULL_TREE;
1015 return fn_type;
1018 if (decl == last_fn && fn_type != ARC_FUNCTION_UNKNOWN)
1019 return fn_type;
1021 /* Assume we have a normal function (not an interrupt handler). */
1022 fn_type = ARC_FUNCTION_NORMAL;
1024 /* Now see if this is an interrupt handler. */
1025 for (a = DECL_ATTRIBUTES (current_function_decl);
1027 a = TREE_CHAIN (a))
1029 tree name = TREE_PURPOSE (a), args = TREE_VALUE (a);
1031 if (name == get_identifier ("__interrupt__")
1032 && list_length (args) == 1
1033 && TREE_CODE (TREE_VALUE (args)) == STRING_CST)
1035 tree value = TREE_VALUE (args);
1037 if (!strcmp (TREE_STRING_POINTER (value), "ilink1"))
1038 fn_type = ARC_FUNCTION_ILINK1;
1039 else if (!strcmp (TREE_STRING_POINTER (value), "ilink2"))
1040 fn_type = ARC_FUNCTION_ILINK2;
1041 else
1042 abort ();
1043 break;
1047 last_fn = decl;
1048 return fn_type;
1051 #define ILINK1_REGNUM 29
1052 #define ILINK2_REGNUM 30
1053 #define RETURN_ADDR_REGNUM 31
1054 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1055 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1057 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1058 The return address and frame pointer are treated separately.
1059 Don't consider them here. */
1060 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1061 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1062 && (regs_ever_live[regno] && (!call_used_regs[regno] || interrupt_p)))
1064 #define MUST_SAVE_RETURN_ADDR (regs_ever_live[RETURN_ADDR_REGNUM])
1066 /* Return the bytes needed to compute the frame pointer from the current
1067 stack pointer.
1069 SIZE is the size needed for local variables. */
1071 unsigned int
1072 arc_compute_frame_size (int size /* # of var. bytes allocated. */)
1074 int regno;
1075 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1076 unsigned int reg_size, reg_offset;
1077 unsigned int gmask;
1078 enum arc_function_type fn_type;
1079 int interrupt_p;
1081 var_size = size;
1082 args_size = current_function_outgoing_args_size;
1083 pretend_size = current_function_pretend_args_size;
1084 extra_size = FIRST_PARM_OFFSET (0);
1085 total_size = extra_size + pretend_size + args_size + var_size;
1086 reg_offset = FIRST_PARM_OFFSET(0) + current_function_outgoing_args_size;
1087 reg_size = 0;
1088 gmask = 0;
1090 /* See if this is an interrupt handler. Call used registers must be saved
1091 for them too. */
1092 fn_type = arc_compute_function_type (current_function_decl);
1093 interrupt_p = ARC_INTERRUPT_P (fn_type);
1095 /* Calculate space needed for registers.
1096 ??? We ignore the extension registers for now. */
1098 for (regno = 0; regno <= 31; regno++)
1100 if (MUST_SAVE_REGISTER (regno, interrupt_p))
1102 reg_size += UNITS_PER_WORD;
1103 gmask |= 1 << regno;
1107 total_size += reg_size;
1109 /* If the only space to allocate is the fp/blink save area this is an
1110 empty frame. However, if we'll be making a function call we need to
1111 allocate a stack frame for our callee's fp/blink save area. */
1112 if (total_size == extra_size
1113 && !MUST_SAVE_RETURN_ADDR)
1114 total_size = extra_size = 0;
1116 total_size = ARC_STACK_ALIGN (total_size);
1118 /* Save computed information. */
1119 current_frame_info.total_size = total_size;
1120 current_frame_info.extra_size = extra_size;
1121 current_frame_info.pretend_size = pretend_size;
1122 current_frame_info.var_size = var_size;
1123 current_frame_info.args_size = args_size;
1124 current_frame_info.reg_size = reg_size;
1125 current_frame_info.reg_offset = reg_offset;
1126 current_frame_info.gmask = gmask;
1127 current_frame_info.initialized = reload_completed;
1129 /* Ok, we're done. */
1130 return total_size;
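/* Worked example (editorial addition), assuming FIRST_PARM_OFFSET (0) is 16
   (the 4 word save area in the diagram above), UNITS_PER_WORD is 4 and the
   stack is kept 8 byte aligned: a normal function with 20 bytes of locals,
   no outgoing arguments, that ends up saving r13 and r14 gets
   var_size = 20, extra_size = 16, reg_size = 8,
   gmask = (1 << 13) | (1 << 14), reg_offset = 16, and a total_size of 44
   rounded up to 48.  */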
1133 /* Common code to save/restore registers. */
1135 void
1136 arc_save_restore (FILE *file,
1137 const char *base_reg,
1138 unsigned int offset,
1139 unsigned int gmask,
1140 const char *op)
1142 int regno;
1144 if (gmask == 0)
1145 return;
1147 for (regno = 0; regno <= 31; regno++)
1149 if ((gmask & (1L << regno)) != 0)
1151 fprintf (file, "\t%s %s,[%s,%d]\n",
1152 op, reg_names[regno], base_reg, offset);
1153 offset += UNITS_PER_WORD;
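/* Example output (editorial addition): with a gmask covering r13 and r14,
   offset 16, base_reg "sp" and op "st", the loop above emits

       st r13,[sp,16]
       st r14,[sp,20]

   and the epilogue replays the same walk with op "ld" to restore them.  */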
1158 /* Target hook to assemble an integer object. The ARC version needs to
1159 emit a special directive for references to labels and function
1160 symbols. */
1162 static bool
1163 arc_assemble_integer (rtx x, unsigned int size, int aligned_p)
1165 if (size == UNITS_PER_WORD && aligned_p
1166 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
1167 || GET_CODE (x) == LABEL_REF))
1169 fputs ("\t.word\t%st(", asm_out_file);
1170 output_addr_const (asm_out_file, x);
1171 fputs (")\n", asm_out_file);
1172 return true;
1174 return default_assemble_integer (x, size, aligned_p);
1177 /* Set up the stack and frame pointer (if desired) for the function. */
1179 static void
1180 arc_output_function_prologue (FILE *file, HOST_WIDE_INT size)
1182 const char *sp_str = reg_names[STACK_POINTER_REGNUM];
1183 const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1184 unsigned int gmask = current_frame_info.gmask;
1185 enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1187 /* If this is an interrupt handler, set up our stack frame.
1188 ??? Optimize later. */
1189 if (ARC_INTERRUPT_P (fn_type))
1191 fprintf (file, "\t%s interrupt handler\n",
1192 ASM_COMMENT_START);
1193 fprintf (file, "\tsub %s,%s,16\n", sp_str, sp_str);
1196 /* This is only for the human reader. */
1197 fprintf (file, "\t%s BEGIN PROLOGUE %s vars= %d, regs= %d, args= %d, extra= %d\n",
1198 ASM_COMMENT_START, ASM_COMMENT_START,
1199 current_frame_info.var_size,
1200 current_frame_info.reg_size / 4,
1201 current_frame_info.args_size,
1202 current_frame_info.extra_size);
1204 size = ARC_STACK_ALIGN (size);
1205 size = (! current_frame_info.initialized
1206 ? arc_compute_frame_size (size)
1207 : current_frame_info.total_size);
1209 /* These cases shouldn't happen. Catch them now. */
1210 if (size == 0 && gmask)
1211 abort ();
1213 /* Allocate space for register arguments if this is a variadic function. */
1214 if (current_frame_info.pretend_size != 0)
1215 fprintf (file, "\tsub %s,%s,%d\n",
1216 sp_str, sp_str, current_frame_info.pretend_size);
1218 /* The home-grown ABI says link register is saved first. */
1219 if (MUST_SAVE_RETURN_ADDR)
1220 fprintf (file, "\tst %s,[%s,%d]\n",
1221 reg_names[RETURN_ADDR_REGNUM], sp_str, UNITS_PER_WORD);
1223 /* Set up the previous frame pointer next (if we need to). */
1224 if (frame_pointer_needed)
1226 fprintf (file, "\tst %s,[%s]\n", fp_str, sp_str);
1227 fprintf (file, "\tmov %s,%s\n", fp_str, sp_str);
1230 /* ??? We don't handle the case where the saved regs are more than 252
1231 bytes away from sp. This can be handled by decrementing sp once, saving
1232 the regs, and then decrementing it again. The epilogue doesn't have this
1233 problem as the `ld' insn takes reg+limm values (though it would be more
1234 efficient to avoid reg+limm). */
1236 /* Allocate the stack frame. */
1237 if (size - current_frame_info.pretend_size > 0)
1238 fprintf (file, "\tsub %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
1239 sp_str, sp_str, size - current_frame_info.pretend_size);
1241 /* Save any needed call-saved regs (and call-used if this is an
1242 interrupt handler). */
1243 arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1244 /* The zeroing of these two bits is unnecessary,
1245 but leave this in for clarity. */
1246 gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1247 "st");
1249 fprintf (file, "\t%s END PROLOGUE\n", ASM_COMMENT_START);
1252 /* Do any necessary cleanup after a function to restore stack, frame,
1253 and regs. */
1255 static void
1256 arc_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
1258 rtx epilogue_delay = current_function_epilogue_delay_list;
1259 int noepilogue = FALSE;
1260 enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1262 /* This is only for the human reader. */
1263 fprintf (file, "\t%s EPILOGUE\n", ASM_COMMENT_START);
1265 size = ARC_STACK_ALIGN (size);
1266 size = (!current_frame_info.initialized
1267 ? arc_compute_frame_size (size)
1268 : current_frame_info.total_size);
1270 if (size == 0 && epilogue_delay == 0)
1272 rtx insn = get_last_insn ();
1274 /* If the last insn was a BARRIER, we don't have to write any code
1275 because a jump (aka return) was put there. */
1276 if (GET_CODE (insn) == NOTE)
1277 insn = prev_nonnote_insn (insn);
1278 if (insn && GET_CODE (insn) == BARRIER)
1279 noepilogue = TRUE;
1282 if (!noepilogue)
1284 unsigned int pretend_size = current_frame_info.pretend_size;
1285 unsigned int frame_size = size - pretend_size;
1286 int restored, fp_restored_p;
1287 int can_trust_sp_p = !current_function_calls_alloca;
1288 const char *sp_str = reg_names[STACK_POINTER_REGNUM];
1289 const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1291 /* ??? There are lots of optimizations that can be done here.
1292 EG: Use fp to restore regs if it's closer.
1293 Maybe in time we'll do them all. For now, always restore regs from
1294 sp, but don't restore sp if we don't have to. */
1296 if (!can_trust_sp_p)
1298 if (!frame_pointer_needed)
1299 abort ();
1300 fprintf (file,"\tsub %s,%s,%d\t\t%s sp not trusted here\n",
1301 sp_str, fp_str, frame_size, ASM_COMMENT_START);
1304 /* Restore any saved registers. */
1305 arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1306 /* The zeroing of these two bits is unnecessary,
1307 but leave this in for clarity. */
1308 current_frame_info.gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1309 "ld");
1311 if (MUST_SAVE_RETURN_ADDR)
1312 fprintf (file, "\tld %s,[%s,%d]\n",
1313 reg_names[RETURN_ADDR_REGNUM],
1314 frame_pointer_needed ? fp_str : sp_str,
1315 UNITS_PER_WORD + (frame_pointer_needed ? 0 : frame_size));
1317 /* Keep track of how much of the stack pointer we've restored.
1318 It makes the following a lot more readable. */
1319 restored = 0;
1320 fp_restored_p = 0;
1322 /* We try to emit the epilogue delay slot insn right after the load
1323 of the return address register so that it can execute with the
1324 stack intact. Secondly, loads are delayed. */
1325 /* ??? If stack intactness is important, always emit now. */
1326 if (MUST_SAVE_RETURN_ADDR && epilogue_delay != NULL_RTX)
1328 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1, NULL);
1329 epilogue_delay = NULL_RTX;
1332 if (frame_pointer_needed)
1334 /* Try to restore the frame pointer in the delay slot. We can't,
1335 however, if any of these is true. */
1336 if (epilogue_delay != NULL_RTX
1337 || !SMALL_INT (frame_size)
1338 || pretend_size
1339 || ARC_INTERRUPT_P (fn_type))
1341 /* Note that we restore fp and sp here! */
1342 fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1343 restored += frame_size;
1344 fp_restored_p = 1;
1347 else if (!SMALL_INT (size /* frame_size + pretend_size */)
1348 || ARC_INTERRUPT_P (fn_type))
1350 fprintf (file, "\tadd %s,%s,%d\n", sp_str, sp_str, frame_size);
1351 restored += frame_size;
1354 /* These must be done before the return insn because the delay slot
1355 does the final stack restore. */
1356 if (ARC_INTERRUPT_P (fn_type))
1358 if (epilogue_delay)
1360 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1,
1361 NULL);
1365 /* Emit the return instruction. */
1367 static const int regs[4] = {
1368 0, RETURN_ADDR_REGNUM, ILINK1_REGNUM, ILINK2_REGNUM
1370 fprintf (file, "\tj.d %s\n", reg_names[regs[fn_type]]);
1373 /* If the only register saved is the return address, we need a
1374 nop, unless we have an instruction to put into it. Otherwise
1375 we don't since reloading multiple registers doesn't reference
1376 the register being loaded. */
1378 if (ARC_INTERRUPT_P (fn_type))
1379 fprintf (file, "\tadd %s,%s,16\n", sp_str, sp_str);
1380 else if (epilogue_delay != NULL_RTX)
1382 if (frame_pointer_needed && !fp_restored_p)
1383 abort ();
1384 if (restored < size)
1385 abort ();
1386 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1, NULL);
1388 else if (frame_pointer_needed && !fp_restored_p)
1390 if (!SMALL_INT (frame_size))
1391 abort ();
1392 /* Note that we restore fp and sp here! */
1393 fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1395 else if (restored < size)
1397 if (!SMALL_INT (size - restored))
1398 abort ();
1399 fprintf (file, "\tadd %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
1400 sp_str, sp_str, size - restored);
1402 else
1403 fprintf (file, "\tnop\n");
1406 /* Reset state info for each function. */
1407 current_frame_info = zero_frame_info;
1408 arc_compute_function_type (NULL_TREE);
1411 /* Define the number of delay slots needed for the function epilogue.
1413 Interrupt handlers can't have any epilogue delay slots (it's always needed
1414 for something else, I think). For normal functions, we have to worry about
1415 using call-saved regs as they'll be restored before the delay slot insn.
1416 Functions with non-empty frames already have enough choices for the epilogue
1417 delay slot so for now we only consider functions with empty frames. */
1420 arc_delay_slots_for_epilogue (void)
1422 if (arc_compute_function_type (current_function_decl) != ARC_FUNCTION_NORMAL)
1423 return 0;
1424 if (!current_frame_info.initialized)
1425 (void) arc_compute_frame_size (get_frame_size ());
1426 if (current_frame_info.total_size == 0)
1427 return 1;
1428 return 0;
1431 /* Return true if TRIAL is a valid insn for the epilogue delay slot.
1432 Any single length instruction which doesn't reference the stack or frame
1433 pointer or any call-saved register is OK. SLOT will always be 0. */
1436 arc_eligible_for_epilogue_delay (rtx trial, int slot)
1438 if (slot != 0)
1439 abort ();
1441 if (get_attr_length (trial) == 1
 1442 /* If registers were saved, presumably there's more than enough
1443 possibilities for the delay slot. The alternative is something
1444 more complicated (of course, if we expanded the epilogue as rtl
1445 this problem would go away). */
1446 /* ??? Note that this will always be true since only functions with
1447 empty frames have epilogue delay slots. See
1448 arc_delay_slots_for_epilogue. */
1449 && current_frame_info.gmask == 0
1450 && ! reg_mentioned_p (stack_pointer_rtx, PATTERN (trial))
1451 && ! reg_mentioned_p (frame_pointer_rtx, PATTERN (trial)))
1452 return 1;
1453 return 0;
1456 /* PIC */
1458 /* Emit special PIC prologues and epilogues. */
1460 void
1461 arc_finalize_pic (void)
1463 /* nothing to do */
1466 /* Return true if OP is a shift operator. */
1469 shift_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1471 switch (GET_CODE (op))
1473 case ASHIFTRT:
1474 case LSHIFTRT:
1475 case ASHIFT:
1476 return 1;
1477 default:
1478 return 0;
1482 /* Output the assembler code for doing a shift.
1483 We go to a bit of trouble to generate efficient code as the ARC only has
1484 single bit shifts. This is taken from the h8300 port. We only have one
1485 mode of shifting and can't access individual bytes like the h8300 can, so
1486 this is greatly simplified (at the expense of not generating hyper-
1487 efficient code).
1489 This function is not used if the variable shift insns are present. */
1491 /* ??? We assume the output operand is the same as operand 1.
1492 This can be optimized (deleted) in the case of 1 bit shifts. */
1493 /* ??? We use the loop register here. We don't use it elsewhere (yet) and
1494 using it here will give us a chance to play with it. */
1496 const char *
1497 output_shift (rtx *operands)
1499 rtx shift = operands[3];
1500 enum machine_mode mode = GET_MODE (shift);
1501 enum rtx_code code = GET_CODE (shift);
1502 const char *shift_one;
1504 if (mode != SImode)
1505 abort ();
1507 switch (code)
1509 case ASHIFT: shift_one = "asl %0,%0"; break;
1510 case ASHIFTRT: shift_one = "asr %0,%0"; break;
1511 case LSHIFTRT: shift_one = "lsr %0,%0"; break;
1512 default: abort ();
1515 if (GET_CODE (operands[2]) != CONST_INT)
1517 if (optimize)
1518 output_asm_insn ("mov lp_count,%2", operands);
1519 else
1520 output_asm_insn ("mov %4,%2", operands);
1521 goto shiftloop;
1523 else
1525 int n = INTVAL (operands[2]);
1527 /* If the count is negative, make it 0. */
1528 if (n < 0)
1529 n = 0;
1530 /* If the count is too big, truncate it.
1531 ANSI says shifts of GET_MODE_BITSIZE are undefined - we choose to
1532 do the intuitive thing. */
1533 else if (n > GET_MODE_BITSIZE (mode))
1534 n = GET_MODE_BITSIZE (mode);
1536 /* First see if we can do them inline. */
1537 if (n <= 8)
1539 while (--n >= 0)
1540 output_asm_insn (shift_one, operands);
1542 /* See if we can use a rotate/and. */
1543 else if (n == BITS_PER_WORD - 1)
1545 switch (code)
1547 case ASHIFT :
1548 output_asm_insn ("and %0,%0,1\n\tror %0,%0", operands);
1549 break;
1550 case ASHIFTRT :
1551 /* The ARC doesn't have a rol insn. Use something else. */
1552 output_asm_insn ("asl.f 0,%0\n\tsbc %0,0,0", operands);
1553 break;
1554 case LSHIFTRT :
1555 /* The ARC doesn't have a rol insn. Use something else. */
1556 output_asm_insn ("asl.f 0,%0\n\tadc %0,0,0", operands);
1557 break;
1558 default:
1559 break;
1562 /* Must loop. */
1563 else
1565 char buf[100];
1567 if (optimize)
1568 output_asm_insn ("mov lp_count,%c2", operands);
1569 else
1570 output_asm_insn ("mov %4,%c2", operands);
1571 shiftloop:
1572 if (optimize)
1574 if (flag_pic)
1575 sprintf (buf, "lr %%4,[status]\n\tadd %%4,%%4,6\t%s single insn loop start",
1576 ASM_COMMENT_START);
1577 else
1578 sprintf (buf, "mov %%4,%%%%st(1f)\t%s (single insn loop start) >> 2",
1579 ASM_COMMENT_START);
1580 output_asm_insn (buf, operands);
1581 output_asm_insn ("sr %4,[lp_start]", operands);
1582 output_asm_insn ("add %4,%4,1", operands);
1583 output_asm_insn ("sr %4,[lp_end]", operands);
1584 output_asm_insn ("nop\n\tnop", operands);
1585 if (flag_pic)
1586 fprintf (asm_out_file, "\t%s single insn loop\n",
1587 ASM_COMMENT_START);
1588 else
1589 fprintf (asm_out_file, "1:\t%s single insn loop\n",
1590 ASM_COMMENT_START);
1591 output_asm_insn (shift_one, operands);
1593 else
1595 fprintf (asm_out_file, "1:\t%s begin shift loop\n",
1596 ASM_COMMENT_START);
1597 output_asm_insn ("sub.f %4,%4,1", operands);
1598 output_asm_insn ("nop", operands);
1599 output_asm_insn ("bn.nd 2f", operands);
1600 output_asm_insn (shift_one, operands);
1601 output_asm_insn ("b.nd 1b", operands);
1602 fprintf (asm_out_file, "2:\t%s end shift loop\n",
1603 ASM_COMMENT_START);
1608 return "";
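/* Examples of the generated code (editorial addition): a shift left by 3
   simply emits "asl %0,%0" three times; a logical shift right by 31 uses
   the flag trick above, "asl.f 0,%0" followed by "adc %0,0,0", instead of
   31 single-bit shifts; a shift by a register amount falls into the
   lp_count loop (or the sub.f/bn.nd loop when not optimizing).  */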
1611 /* Nested function support. */
1613 /* Emit RTL insns to initialize the variable parts of a trampoline.
1614 FNADDR is an RTX for the address of the function's pure code.
1615 CXT is an RTX for the static chain value for the function. */
1617 void
1618 arc_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
1619 rtx fnaddr ATTRIBUTE_UNUSED,
1620 rtx cxt ATTRIBUTE_UNUSED)
1624 /* Set the cpu type and print out other fancy things,
1625 at the top of the file. */
1627 static void
1628 arc_file_start (void)
1630 default_file_start ();
1631 fprintf (asm_out_file, "\t.cpu %s\n", arc_cpu_string);
1634 /* Print operand X (an rtx) in assembler syntax to file FILE.
1635 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
1636 For `%' followed by punctuation, CODE is the punctuation and X is null. */
1638 void
1639 arc_print_operand (FILE *file, rtx x, int code)
1641 switch (code)
1643 case '#' :
1644 /* Conditional branches. For now these are equivalent. */
1645 case '*' :
1646 /* Unconditional branches. Output the appropriate delay slot suffix. */
1647 if (!final_sequence || XVECLEN (final_sequence, 0) == 1)
1649 /* There's nothing in the delay slot. */
1650 fputs (".nd", file);
1652 else
1654 rtx jump = XVECEXP (final_sequence, 0, 0);
1655 rtx delay = XVECEXP (final_sequence, 0, 1);
1656 if (INSN_ANNULLED_BRANCH_P (jump))
1657 fputs (INSN_FROM_TARGET_P (delay) ? ".jd" : ".nd", file);
1658 else
1659 fputs (".d", file);
1661 return;
1662 case '?' : /* with leading "." */
1663 case '!' : /* without leading "." */
1664 /* This insn can be conditionally executed. See if the ccfsm machinery
1665 says it should be conditionalized. */
1666 if (arc_ccfsm_state == 3 || arc_ccfsm_state == 4)
1668 /* Is this insn in a delay slot? */
1669 if (final_sequence && XVECLEN (final_sequence, 0) == 2)
1671 rtx insn = XVECEXP (final_sequence, 0, 1);
1673 /* If the insn is annulled and is from the target path, we need
 1674 to invert the condition test. */
1675 if (INSN_ANNULLED_BRANCH_P (insn))
1677 if (INSN_FROM_TARGET_P (insn))
1678 fprintf (file, "%s%s",
1679 code == '?' ? "." : "",
1680 arc_condition_codes[ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc)]);
1681 else
1682 fprintf (file, "%s%s",
1683 code == '?' ? "." : "",
1684 arc_condition_codes[arc_ccfsm_current_cc]);
1686 else
1688 /* This insn is executed for either path, so don't
1689 conditionalize it at all. */
1690 ; /* nothing to do */
1693 else
1695 /* This insn isn't in a delay slot. */
1696 fprintf (file, "%s%s",
1697 code == '?' ? "." : "",
1698 arc_condition_codes[arc_ccfsm_current_cc]);
1701 return;
1702 case '~' :
1703 /* Output a nop if we're between a set of the condition codes,
1704 and a conditional branch. */
1705 if (last_insn_set_cc_p)
1706 fputs ("nop\n\t", file);
1707 return;
1708 case 'd' :
1709 fputs (arc_condition_codes[get_arc_condition_code (x)], file);
1710 return;
1711 case 'D' :
1712 fputs (arc_condition_codes[ARC_INVERSE_CONDITION_CODE
1713 (get_arc_condition_code (x))],
1714 file);
1715 return;
1716 case 'R' :
1717 /* Write second word of DImode or DFmode reference,
1718 register or memory. */
1719 if (GET_CODE (x) == REG)
1720 fputs (reg_names[REGNO (x)+1], file);
1721 else if (GET_CODE (x) == MEM)
1723 fputc ('[', file);
1724 /* Handle possible auto-increment. Since it is pre-increment and
1725 we have already done it, we can just use an offset of four. */
1726 /* ??? This is taken from rs6000.c I think. I don't think it is
1727 currently necessary, but keep it around. */
1728 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1729 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1730 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
1731 else
1732 output_address (plus_constant (XEXP (x, 0), 4));
1733 fputc (']', file);
1735 else
1736 output_operand_lossage ("invalid operand to %%R code");
1737 return;
1738 case 'S' :
1739 if ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
1740 || GET_CODE (x) == LABEL_REF)
1742 fprintf (file, "%%st(");
1743 output_addr_const (file, x);
1744 fprintf (file, ")");
1745 return;
1747 break;
1748 case 'H' :
1749 case 'L' :
1750 if (GET_CODE (x) == REG)
1752 /* L = least significant word, H = most significant word */
1753 if ((TARGET_BIG_ENDIAN != 0) ^ (code == 'L'))
1754 fputs (reg_names[REGNO (x)], file);
1755 else
1756 fputs (reg_names[REGNO (x)+1], file);
1758 else if (GET_CODE (x) == CONST_INT
1759 || GET_CODE (x) == CONST_DOUBLE)
1761 rtx first, second;
1763 split_double (x, &first, &second);
1764 fprintf (file, "0x%08lx",
1765 (long)(code == 'L' ? INTVAL (first) : INTVAL (second)));
1767 else
1768 output_operand_lossage ("invalid operand to %%H/%%L code");
1769 return;
1770 case 'A' :
1772 char str[30];
1774 if (GET_CODE (x) != CONST_DOUBLE
1775 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
1776 abort ();
1778 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
1779 fprintf (file, "%s", str);
1780 return;
1782 case 'U' :
1783 /* Output a load/store with update indicator if appropriate. */
1784 if (GET_CODE (x) == MEM)
1786 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1787 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1788 fputs (".a", file);
1790 else
1791 output_operand_lossage ("invalid operand to %%U code");
1792 return;
1793 case 'V' :
1794 /* Output cache bypass indicator for a load/store insn. Volatile memory
1795 refs are defined to use the cache bypass mechanism. */
1796 if (GET_CODE (x) == MEM)
1798 if (MEM_VOLATILE_P (x))
1799 fputs (".di", file);
1801 else
1802 output_operand_lossage ("invalid operand to %%V code");
1803 return;
1804 case 0 :
1805 /* Do nothing special. */
1806 break;
1807 default :
1808 /* Unknown flag. */
1809 output_operand_lossage ("invalid operand output code");
1812 switch (GET_CODE (x))
1814 case REG :
1815 fputs (reg_names[REGNO (x)], file);
1816 break;
1817 case MEM :
1818 fputc ('[', file);
1819 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
1820 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1821 GET_MODE_SIZE (GET_MODE (x))));
1822 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
1823 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1824 - GET_MODE_SIZE (GET_MODE (x))));
1825 else
1826 output_address (XEXP (x, 0));
1827 fputc (']', file);
1828 break;
1829 case CONST_DOUBLE :
1830 /* We handle SFmode constants here as output_addr_const doesn't. */
1831 if (GET_MODE (x) == SFmode)
1833 REAL_VALUE_TYPE d;
1834 long l;
1836 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1837 REAL_VALUE_TO_TARGET_SINGLE (d, l);
1838 fprintf (file, "0x%08lx", l);
1839 break;
1841 /* Fall through. Let output_addr_const deal with it. */
1842 default :
1843 output_addr_const (file, x);
1844 break;
1848 /* Print a memory address as an operand to reference that memory location. */
1850 void
1851 arc_print_operand_address (FILE *file, rtx addr)
1853 register rtx base, index = 0;
1854 int offset = 0;
1856 switch (GET_CODE (addr))
1858 case REG :
1859 fputs (reg_names[REGNO (addr)], file);
1860 break;
1861 case SYMBOL_REF :
1862 if (/*???*/ 0 && SYMBOL_REF_FUNCTION_P (addr))
1864 fprintf (file, "%%st(");
1865 output_addr_const (file, addr);
1866 fprintf (file, ")");
1868 else
1869 output_addr_const (file, addr);
1870 break;
1871 case PLUS :
1872 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
1873 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
1874 else if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
1875 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
1876 else
1877 base = XEXP (addr, 0), index = XEXP (addr, 1);
1878 if (GET_CODE (base) != REG)
1879 abort ();
1880 fputs (reg_names[REGNO (base)], file);
1881 if (index == 0)
1883 if (offset != 0)
1884 fprintf (file, ",%d", offset);
1886 else if (GET_CODE (index) == REG)
1887 fprintf (file, ",%s", reg_names[REGNO (index)]);
1888 else if (GET_CODE (index) == SYMBOL_REF)
1889 fputc (',', file), output_addr_const (file, index);
1890 else
1891 abort ();
1892 break;
1893 case PRE_INC :
1894 case PRE_DEC :
1895 /* We shouldn't get here as we've lost the mode of the memory object
 1896 (which says how much to inc/dec by). */
1897 abort ();
1898 break;
1899 default :
1900 output_addr_const (file, addr);
1901 break;
1905 /* Update compare/branch separation marker. */
1907 static void
1908 record_cc_ref (rtx insn)
1910 last_insn_set_cc_p = current_insn_set_cc_p;
1912 switch (get_attr_cond (insn))
1914 case COND_SET :
1915 case COND_SET_ZN :
1916 case COND_SET_ZNC :
1917 if (get_attr_length (insn) == 1)
1918 current_insn_set_cc_p = 1;
1919 else
1920 current_insn_set_cc_p = 0;
1921 break;
1922 default :
1923 current_insn_set_cc_p = 0;
1924 break;
1928 /* Conditional execution support.
1930 This is based on the ARM port but for now is much simpler.
1932 A finite state machine takes care of noticing whether or not instructions
1933 can be conditionally executed, and thus decrease execution time and code
1934 size by deleting branch instructions. The fsm is controlled by
1935 final_prescan_insn, and controls the actions of PRINT_OPERAND. The patterns
1936 in the .md file for the branch insns also have a hand in this. */
1938 /* The states of the fsm controlling condition codes are:
1939 0: normal, do nothing special
1940 1: don't output this insn
1941 2: don't output this insn
1942 3: make insns conditional
1943 4: make insns conditional
1945 State transitions (state->state by whom, under what condition):
1946 0 -> 1 final_prescan_insn, if insn is conditional branch
1947 0 -> 2 final_prescan_insn, if the `target' is an unconditional branch
1948 1 -> 3 branch patterns, after having not output the conditional branch
1949 2 -> 4 branch patterns, after having not output the conditional branch
1950 3 -> 0 (*targetm.asm_out.internal_label), if the `target' label is reached
1951 (the target label has CODE_LABEL_NUMBER equal to
1952 arc_ccfsm_target_label).
1953 4 -> 0 final_prescan_insn, if `target' unconditional branch is reached
1955 If the jump clobbers the conditions then we use states 2 and 4.
1957 A similar thing can be done with conditional return insns.
1959 We also handle separating branches from sets of the condition code.
1960 This is done here because knowledge of the ccfsm state is required;
1961 we may not be outputting the branch. */
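/* A purely illustrative sketch (hypothetical instruction sequence; the real
   mnemonics and condition suffixes come from the .md patterns and the
   PRINT_OPERAND codes).  Given a conditional branch around a short block,

       b<cc>    .L1        ; skips at most MAX_INSNS_SKIPPED insns
       insn_1
       insn_2
   .L1:

   the fsm suppresses the branch (state 1 or 2) and then prints the skipped
   insns predicated on the inverse condition (state 3 or 4), roughly

       insn_1.<!cc>
       insn_2.<!cc>
   .L1:

   which removes the branch and any taken-branch penalty.  */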
1963 void
1964 arc_final_prescan_insn (rtx insn,
1965 rtx *opvec ATTRIBUTE_UNUSED,
1966 int noperands ATTRIBUTE_UNUSED)
1967 {
1968 /* BODY will hold the body of INSN. */
1969 register rtx body = PATTERN (insn);
1971 /* This will be 1 if trying to repeat the trick (i.e. do the `else' part of
1972 an if/then/else), and things need to be reversed. */
1973 int reverse = 0;
1975 /* If we start with a return insn, we only succeed if we find another one. */
1976 int seeking_return = 0;
1978 /* START_INSN will hold the insn from where we start looking. This is the
1979 first insn after the following code_label if REVERSE is true. */
1980 rtx start_insn = insn;
1982 /* Update compare/branch separation marker. */
1983 record_cc_ref (insn);
1985 /* Allow -mdebug-ccfsm to turn this off so we can see how well it does.
1986 We can't do this in macro FINAL_PRESCAN_INSN because it's called from
1987 final_scan_insn which has `optimize' as a local. */
1988 if (optimize < 2 || TARGET_NO_COND_EXEC)
1989 return;
1991 /* If in state 4, check if the target branch is reached, in order to
1992 change back to state 0. */
1993 if (arc_ccfsm_state == 4)
1994 {
1995 if (insn == arc_ccfsm_target_insn)
1996 {
1997 arc_ccfsm_target_insn = NULL;
1998 arc_ccfsm_state = 0;
1999 }
2000 return;
2001 }
2003 /* If in state 3, it is possible to repeat the trick, if this insn is an
2004 unconditional branch to a label, and immediately following this branch
2005 is the previous target label which is only used once, and the label this
2006 branch jumps to is not too far off. Or in other words "we've done the
2007 `then' part, see if we can do the `else' part." */
2008 if (arc_ccfsm_state == 3)
2009 {
2010 if (simplejump_p (insn))
2011 {
2012 start_insn = next_nonnote_insn (start_insn);
2013 if (GET_CODE (start_insn) == BARRIER)
2014 {
2015 /* ??? Isn't this always a barrier? */
2016 start_insn = next_nonnote_insn (start_insn);
2017 }
2018 if (GET_CODE (start_insn) == CODE_LABEL
2019 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2020 && LABEL_NUSES (start_insn) == 1)
2021 reverse = TRUE;
2022 else
2023 return;
2024 }
2025 else if (GET_CODE (body) == RETURN)
2026 {
2027 start_insn = next_nonnote_insn (start_insn);
2028 if (GET_CODE (start_insn) == BARRIER)
2029 start_insn = next_nonnote_insn (start_insn);
2030 if (GET_CODE (start_insn) == CODE_LABEL
2031 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2032 && LABEL_NUSES (start_insn) == 1)
2033 {
2034 reverse = TRUE;
2035 seeking_return = 1;
2036 }
2037 else
2038 return;
2039 }
2040 else
2041 return;
2042 }
2044 if (GET_CODE (insn) != JUMP_INSN)
2045 return;
2047 /* This jump might be paralleled with a clobber of the condition codes;
2048 the jump should always come first. */
2049 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
2050 body = XVECEXP (body, 0, 0);
2052 if (reverse
2053 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
2054 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
2055 {
2056 int insns_skipped = 0, fail = FALSE, succeed = FALSE;
2057 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
2058 int then_not_else = TRUE;
2059 /* Nonzero if next insn must be the target label. */
2060 int next_must_be_target_label_p;
2061 rtx this_insn = start_insn, label = 0;
2063 /* Register the insn jumped to. */
2064 if (reverse)
2065 {
2066 if (!seeking_return)
2067 label = XEXP (SET_SRC (body), 0);
2068 }
2069 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
2070 label = XEXP (XEXP (SET_SRC (body), 1), 0);
2071 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
2072 {
2073 label = XEXP (XEXP (SET_SRC (body), 2), 0);
2074 then_not_else = FALSE;
2075 }
2076 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
2077 seeking_return = 1;
2078 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
2079 {
2080 seeking_return = 1;
2081 then_not_else = FALSE;
2082 }
2083 else
2084 abort ();
2086 /* See how many insns this branch skips, and what kind of insns. If all
2087 insns are okay, and the label or unconditional branch to the same
2088 label is not too far away, succeed. */
2089 for (insns_skipped = 0, next_must_be_target_label_p = FALSE;
2090 !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
2091 insns_skipped++)
2092 {
2093 rtx scanbody;
2095 this_insn = next_nonnote_insn (this_insn);
2096 if (!this_insn)
2097 break;
2099 if (next_must_be_target_label_p)
2100 {
2101 if (GET_CODE (this_insn) == BARRIER)
2102 continue;
2103 if (GET_CODE (this_insn) == CODE_LABEL
2104 && this_insn == label)
2105 {
2106 arc_ccfsm_state = 1;
2107 succeed = TRUE;
2108 }
2109 else
2110 fail = TRUE;
2111 break;
2112 }
2114 scanbody = PATTERN (this_insn);
2116 switch (GET_CODE (this_insn))
2117 {
2118 case CODE_LABEL:
2119 /* Succeed if it is the target label, otherwise fail since
2120 control falls in from somewhere else. */
2121 if (this_insn == label)
2122 {
2123 arc_ccfsm_state = 1;
2124 succeed = TRUE;
2125 }
2126 else
2127 fail = TRUE;
2128 break;
2130 case BARRIER:
2131 /* Succeed if the following insn is the target label.
2132 Otherwise fail.
2133 If return insns are used then the last insn in a function
2134 will be a barrier. */
2135 next_must_be_target_label_p = TRUE;
2136 break;
2138 case CALL_INSN:
2139 /* Can handle a call insn if there are no insns after it.
2140 I.e., the next "insn" is the target label. We don't have to
2141 worry about delay slots, as such insns are SEQUENCEs inside
2142 INSNs. ??? It is possible to handle such insns though. */
2143 if (get_attr_cond (this_insn) == COND_CANUSE)
2144 next_must_be_target_label_p = TRUE;
2145 else
2146 fail = TRUE;
2147 break;
2149 case JUMP_INSN:
2150 /* If this is an unconditional branch to the same label, succeed.
2151 If it is to another label, do nothing. If it is conditional,
2152 fail. */
2153 /* ??? Probably, the test for the SET and the PC is unnecessary. */
2155 if (GET_CODE (scanbody) == SET
2156 && GET_CODE (SET_DEST (scanbody)) == PC)
2157 {
2158 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
2159 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
2160 {
2161 arc_ccfsm_state = 2;
2162 succeed = TRUE;
2163 }
2164 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
2165 fail = TRUE;
2166 }
2167 else if (GET_CODE (scanbody) == RETURN
2168 && seeking_return)
2169 {
2170 arc_ccfsm_state = 2;
2171 succeed = TRUE;
2172 }
2173 else if (GET_CODE (scanbody) == PARALLEL)
2174 {
2175 if (get_attr_cond (this_insn) != COND_CANUSE)
2176 fail = TRUE;
2177 }
2178 break;
2180 case INSN:
2181 /* We can only do this with insns that can use the condition
2182 codes (and don't set them). */
2183 if (GET_CODE (scanbody) == SET
2184 || GET_CODE (scanbody) == PARALLEL)
2185 {
2186 if (get_attr_cond (this_insn) != COND_CANUSE)
2187 fail = TRUE;
2188 }
2189 /* We can't handle other insns like sequences. */
2190 else
2191 fail = TRUE;
2192 break;
2194 default:
2195 break;
2196 }
2197 }
2199 if (succeed)
2200 {
2201 if ((!seeking_return) && (arc_ccfsm_state == 1 || reverse))
2202 arc_ccfsm_target_label = CODE_LABEL_NUMBER (label);
2203 else if (seeking_return || arc_ccfsm_state == 2)
2204 {
2205 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
2206 {
2207 this_insn = next_nonnote_insn (this_insn);
2208 if (this_insn && (GET_CODE (this_insn) == BARRIER
2209 || GET_CODE (this_insn) == CODE_LABEL))
2210 abort ();
2211 }
2212 if (!this_insn)
2213 {
2214 /* Oh dear! We ran off the end; give up. */
2215 extract_insn_cached (insn);
2216 arc_ccfsm_state = 0;
2217 arc_ccfsm_target_insn = NULL;
2218 return;
2219 }
2220 arc_ccfsm_target_insn = this_insn;
2221 }
2222 else
2223 abort ();
2225 /* If REVERSE is true, ARC_CCFSM_CURRENT_CC needs to be inverted from
2226 what it was. */
2227 if (!reverse)
2228 arc_ccfsm_current_cc = get_arc_condition_code (XEXP (SET_SRC (body),
2229 0));
2231 if (reverse || then_not_else)
2232 arc_ccfsm_current_cc = ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc);
2233 }
2235 /* Restore recog_data. Getting the attributes of other insns can
2236 destroy this array, but final.c assumes that it remains intact
2237 across this call. */
2238 extract_insn_cached (insn);
2239 }
2240 }
2242 /* Record that we are currently outputting label NUM with prefix PREFIX.
2243 If it's the label we're looking for, reset the ccfsm machinery.
2245 Called from (*targetm.asm_out.internal_label). */
2247 void
2248 arc_ccfsm_at_label (const char *prefix, int num)
2249 {
2250 if (arc_ccfsm_state == 3 && arc_ccfsm_target_label == num
2251 && !strcmp (prefix, "L"))
2252 {
2253 arc_ccfsm_state = 0;
2254 arc_ccfsm_target_insn = NULL_RTX;
2255 }
2256 }
2258 /* See if the current insn, which is a conditional branch, is to be
2259 deleted. */
2261 int
2262 arc_ccfsm_branch_deleted_p (void)
2263 {
2264 if (arc_ccfsm_state == 1 || arc_ccfsm_state == 2)
2265 return 1;
2266 return 0;
2267 }
2269 /* Record that a branch isn't output because subsequent insns can be
2270 conditionalized. */
2272 void
2273 arc_ccfsm_record_branch_deleted (void)
2274 {
2275 /* Indicate we're conditionalizing insns now. */
2276 arc_ccfsm_state += 2;
2278 /* If the next insn is a subroutine call, we still need a nop between the
2279 cc setter and the cc user. We need to undo the effect of calling record_cc_ref
2280 for the just-deleted branch. */
2281 current_insn_set_cc_p = last_insn_set_cc_p;
2282 }
2284 void
2285 arc_va_start (tree valist, rtx nextarg)
2286 {
2287 /* See arc_setup_incoming_varargs for reasons for this oddity. */
2288 if (current_function_args_info < 8
2289 && (current_function_args_info & 1))
2290 nextarg = plus_constant (nextarg, UNITS_PER_WORD);
2292 std_expand_builtin_va_start (valist, nextarg);
2293 }
2295 static tree
2296 arc_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
2297 {
2298 /* All aggregates are passed by reference. All scalar types larger
2299 than 8 bytes are passed by reference. */
2301 if (AGGREGATE_TYPE_P (type) || int_size_in_bytes (type) > 8)
2302 {
2303 tree type_ptr = build_pointer_type (type);
2304 tree addr = std_gimplify_va_arg_expr (valist, type_ptr, pre_p, post_p);
2305 return build_fold_indirect_ref (addr);
2306 }
2308 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
2309 }
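/* A purely illustrative example of the rule above (hypothetical types and
   calls, not taken from real code):

       struct big { int v[4]; };          a 16-byte aggregate

   va_arg (ap, double) names an 8-byte scalar, so it is read directly from
   the va_list, while va_arg (ap, struct big), like any aggregate or any
   scalar wider than 8 bytes, is gimplified into a load of a pointer from
   the va_list followed by a dereference of that pointer.  */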
2311 /* This is how to output a definition of an internal numbered label where
2312 PREFIX is the class of label and NUM is the number within the class. */
2314 static void
2315 arc_internal_label (FILE *stream, const char *prefix, unsigned long labelno)
2316 {
2317 arc_ccfsm_at_label (prefix, labelno);
2318 default_internal_label (stream, prefix, labelno);
2319 }
2321 /* Worker function for TARGET_ASM_EXTERNAL_LIBCALL. */
2323 static void
2324 arc_external_libcall (rtx fun ATTRIBUTE_UNUSED)
2325 {
2326 #if 0
2327 /* On the ARC we want to have libgcc's for multiple cpus in one binary.
2328 We can't use `assemble_name' here as that will call ASM_OUTPUT_LABELREF
2329 and we'll get another suffix added on if -mmangle-cpu. */
2330 if (TARGET_MANGLE_CPU_LIBGCC)
2331 {
2332 fprintf (FILE, "\t.rename\t_%s, _%s%s\n",
2333 XSTR (SYMREF, 0), XSTR (SYMREF, 0),
2334 arc_mangle_suffix);
2335 }
2336 #endif
2337 }
2339 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2341 static bool
2342 arc_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
2343 {
2344 if (AGGREGATE_TYPE_P (type))
2345 return true;
2346 else
2347 {
2348 HOST_WIDE_INT size = int_size_in_bytes (type);
2349 return (size == -1 || size > 8);