1 /* Subroutines used for code generation on the Argonaut ARC cpu.
2 Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2003,
3 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* ??? This is an old port, and is undoubtedly suffering from bit rot. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "real.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "function.h"
38 #include "expr.h"
39 #include "recog.h"
40 #include "toplev.h"
41 #include "df.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "target-def.h"
46 /* Which cpu we're compiling for. */
47 int arc_cpu_type;
49 /* Name of mangle string to add to symbols to separate code compiled for each
50 cpu (or NULL). */
51 const char *arc_mangle_cpu;
53 /* Name of text, data, and rodata sections used in varasm.c. */
54 const char *arc_text_section;
55 const char *arc_data_section;
56 const char *arc_rodata_section;
58 /* Array of valid operand punctuation characters. */
59 char arc_punct_chars[256];
61 /* Variables used by arc_final_prescan_insn to implement conditional
62 execution. */
63 static int arc_ccfsm_state;
64 static int arc_ccfsm_current_cc;
65 static rtx arc_ccfsm_target_insn;
66 static int arc_ccfsm_target_label;
68 /* The maximum number of insns skipped which will be conditionalised if
69 possible. */
70 #define MAX_INSNS_SKIPPED 3
72 /* A nop is needed between a 4 byte insn that sets the condition codes and
73 a branch that uses them (the same isn't true for an 8 byte insn that sets
74 the condition codes). Set by arc_final_prescan_insn. Used by
75 arc_print_operand. */
76 static int last_insn_set_cc_p;
77 static int current_insn_set_cc_p;
78 static bool arc_handle_option (size_t, const char *, int);
79 static void record_cc_ref (rtx);
80 static void arc_init_reg_tables (void);
81 static int get_arc_condition_code (rtx);
82 static tree arc_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
83 static bool arc_assemble_integer (rtx, unsigned int, int);
84 static void arc_output_function_prologue (FILE *, HOST_WIDE_INT);
85 static void arc_output_function_epilogue (FILE *, HOST_WIDE_INT);
86 static void arc_file_start (void);
87 static void arc_internal_label (FILE *, const char *, unsigned long);
88 static void arc_va_start (tree, rtx);
89 static void arc_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
90 tree, int *, int);
91 static bool arc_rtx_costs (rtx, int, int, int *, bool);
92 static int arc_address_cost (rtx, bool);
93 static void arc_external_libcall (rtx);
94 static bool arc_return_in_memory (const_tree, const_tree);
95 static bool arc_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
96 const_tree, bool);
98 /* ARC specific attributes.  */
100 static const struct attribute_spec arc_attribute_table[] =
102 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
103 { "interrupt", 1, 1, true, false, false, arc_handle_interrupt_attribute },
104 { NULL, 0, 0, false, false, false, NULL }
107 /* Initialize the GCC target structure. */
108 #undef TARGET_ASM_ALIGNED_HI_OP
109 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
110 #undef TARGET_ASM_ALIGNED_SI_OP
111 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
112 #undef TARGET_ASM_INTEGER
113 #define TARGET_ASM_INTEGER arc_assemble_integer
115 #undef TARGET_ASM_FUNCTION_PROLOGUE
116 #define TARGET_ASM_FUNCTION_PROLOGUE arc_output_function_prologue
117 #undef TARGET_ASM_FUNCTION_EPILOGUE
118 #define TARGET_ASM_FUNCTION_EPILOGUE arc_output_function_epilogue
119 #undef TARGET_ASM_FILE_START
120 #define TARGET_ASM_FILE_START arc_file_start
121 #undef TARGET_ATTRIBUTE_TABLE
122 #define TARGET_ATTRIBUTE_TABLE arc_attribute_table
123 #undef TARGET_ASM_INTERNAL_LABEL
124 #define TARGET_ASM_INTERNAL_LABEL arc_internal_label
125 #undef TARGET_ASM_EXTERNAL_LIBCALL
126 #define TARGET_ASM_EXTERNAL_LIBCALL arc_external_libcall
128 #undef TARGET_HANDLE_OPTION
129 #define TARGET_HANDLE_OPTION arc_handle_option
131 #undef TARGET_RTX_COSTS
132 #define TARGET_RTX_COSTS arc_rtx_costs
133 #undef TARGET_ADDRESS_COST
134 #define TARGET_ADDRESS_COST arc_address_cost
136 #undef TARGET_PROMOTE_FUNCTION_ARGS
137 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
138 #undef TARGET_PROMOTE_FUNCTION_RETURN
139 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
140 #undef TARGET_PROMOTE_PROTOTYPES
141 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
143 #undef TARGET_RETURN_IN_MEMORY
144 #define TARGET_RETURN_IN_MEMORY arc_return_in_memory
145 #undef TARGET_PASS_BY_REFERENCE
146 #define TARGET_PASS_BY_REFERENCE arc_pass_by_reference
147 #undef TARGET_CALLEE_COPIES
148 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
150 #undef TARGET_SETUP_INCOMING_VARARGS
151 #define TARGET_SETUP_INCOMING_VARARGS arc_setup_incoming_varargs
153 #undef TARGET_EXPAND_BUILTIN_VA_START
154 #define TARGET_EXPAND_BUILTIN_VA_START arc_va_start
156 struct gcc_target targetm = TARGET_INITIALIZER;
158 /* Implement TARGET_HANDLE_OPTION. */
160 static bool
161 arc_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
163 switch (code)
165 case OPT_mcpu_:
166 return strcmp (arg, "base") == 0 || ARC_EXTENSION_CPU (arg);
168 default:
169 return true;
173 /* Called by OVERRIDE_OPTIONS to initialize various things. */
175 void
176 arc_init (void)
178 char *tmp;
180 /* Set the pseudo-ops for the various standard sections. */
181 arc_text_section = tmp = XNEWVEC (char, strlen (arc_text_string) + sizeof (ARC_SECTION_FORMAT) + 1);
182 sprintf (tmp, ARC_SECTION_FORMAT, arc_text_string);
183 arc_data_section = tmp = XNEWVEC (char, strlen (arc_data_string) + sizeof (ARC_SECTION_FORMAT) + 1);
184 sprintf (tmp, ARC_SECTION_FORMAT, arc_data_string);
185 arc_rodata_section = tmp = XNEWVEC (char, strlen (arc_rodata_string) + sizeof (ARC_SECTION_FORMAT) + 1);
186 sprintf (tmp, ARC_SECTION_FORMAT, arc_rodata_string);
188 arc_init_reg_tables ();
190 /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P. */
191 memset (arc_punct_chars, 0, sizeof (arc_punct_chars));
192 arc_punct_chars['#'] = 1;
193 arc_punct_chars['*'] = 1;
194 arc_punct_chars['?'] = 1;
195 arc_punct_chars['!'] = 1;
196 arc_punct_chars['~'] = 1;
199 /* The condition codes of the ARC, and the inverse function. */
200 static const char *const arc_condition_codes[] =
202 "al", 0, "eq", "ne", "p", "n", "c", "nc", "v", "nv",
203 "gt", "le", "ge", "lt", "hi", "ls", "pnz", 0
206 #define ARC_INVERSE_CONDITION_CODE(X) ((X) ^ 1)
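/* The inversion macro works because the table above pairs each condition
   with its complement at adjacent even/odd indices: "eq" (2) and "ne" (3),
   "c" (6) and "nc" (7), "gt" (10) and "le" (11), and so on.  XORing an
   index with 1 therefore yields the inverse condition, e.g.
   ARC_INVERSE_CONDITION_CODE (2) == 3 turns "eq" into "ne".  */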
208 /* Returns the index of the ARC condition code string in
209 `arc_condition_codes'. COMPARISON should be an rtx like
210 `(eq (...) (...))'. */
212 static int
213 get_arc_condition_code (rtx comparison)
215 switch (GET_CODE (comparison))
217 case EQ : return 2;
218 case NE : return 3;
219 case GT : return 10;
220 case LE : return 11;
221 case GE : return 12;
222 case LT : return 13;
223 case GTU : return 14;
224 case LEU : return 15;
225 case LTU : return 6;
226 case GEU : return 7;
227 default : gcc_unreachable ();
229 /*NOTREACHED*/
230 return (42);
233 /* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
234 return the mode to be used for the comparison. */
236 enum machine_mode
237 arc_select_cc_mode (enum rtx_code op,
238 rtx x ATTRIBUTE_UNUSED,
239 rtx y ATTRIBUTE_UNUSED)
241 switch (op)
243 case EQ :
244 case NE :
245 return CCZNmode;
246 default :
247 switch (GET_CODE (x))
249 case AND :
250 case IOR :
251 case XOR :
252 case SIGN_EXTEND :
253 case ZERO_EXTEND :
254 return CCZNmode;
255 case ASHIFT :
256 case ASHIFTRT :
257 case LSHIFTRT :
258 return CCZNCmode;
259 default:
260 break;
263 return CCmode;
266 /* Vectors to keep interesting information about registers where it can easily
267 be got. We used to use the actual mode value as the bit number, but there
268 are (or may be) more than 32 modes now. Instead we use two tables: one
269 indexed by hard register number, and one indexed by mode. */
271 /* The purpose of arc_mode_class is to shrink the range of modes so that
272 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
273 mapped into one arc_mode_class mode. */
275 enum arc_mode_class {
276 C_MODE,
277 S_MODE, D_MODE, T_MODE, O_MODE,
278 SF_MODE, DF_MODE, TF_MODE, OF_MODE
281 /* Modes for condition codes. */
282 #define C_MODES (1 << (int) C_MODE)
284 /* Modes for single-word and smaller quantities. */
285 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
287 /* Modes for double-word and smaller quantities. */
288 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
290 /* Modes for quad-word and smaller quantities. */
291 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
293 /* Value is 1 if register/mode pair is acceptable on arc. */
295 const unsigned int arc_hard_regno_mode_ok[] = {
296 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
297 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
298 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, D_MODES,
299 D_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
301 /* ??? Leave these as S_MODES for now. */
302 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
303 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
304 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
305 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, C_MODES
308 unsigned int arc_mode_class [NUM_MACHINE_MODES];
310 enum reg_class arc_regno_reg_class[FIRST_PSEUDO_REGISTER];
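/* Presumably (in arc.h, not shown in this file) a register/mode pair is
   then validated with the usual idiom of ANDing the two tables, along the
   lines of

      (arc_hard_regno_mode_ok[REGNO] & arc_mode_class[MODE]) != 0

   so the condition-code register, whose entry above is C_MODES, only
   accepts MODE_CC modes.  This is a sketch of the convention, not a quote
   of the actual HARD_REGNO_MODE_OK definition.  */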
312 static void
313 arc_init_reg_tables (void)
315 int i;
317 for (i = 0; i < NUM_MACHINE_MODES; i++)
319 switch (GET_MODE_CLASS (i))
321 case MODE_INT:
322 case MODE_PARTIAL_INT:
323 case MODE_COMPLEX_INT:
324 if (GET_MODE_SIZE (i) <= 4)
325 arc_mode_class[i] = 1 << (int) S_MODE;
326 else if (GET_MODE_SIZE (i) == 8)
327 arc_mode_class[i] = 1 << (int) D_MODE;
328 else if (GET_MODE_SIZE (i) == 16)
329 arc_mode_class[i] = 1 << (int) T_MODE;
330 else if (GET_MODE_SIZE (i) == 32)
331 arc_mode_class[i] = 1 << (int) O_MODE;
332 else
333 arc_mode_class[i] = 0;
334 break;
335 case MODE_FLOAT:
336 case MODE_COMPLEX_FLOAT:
337 if (GET_MODE_SIZE (i) <= 4)
338 arc_mode_class[i] = 1 << (int) SF_MODE;
339 else if (GET_MODE_SIZE (i) == 8)
340 arc_mode_class[i] = 1 << (int) DF_MODE;
341 else if (GET_MODE_SIZE (i) == 16)
342 arc_mode_class[i] = 1 << (int) TF_MODE;
343 else if (GET_MODE_SIZE (i) == 32)
344 arc_mode_class[i] = 1 << (int) OF_MODE;
345 else
346 arc_mode_class[i] = 0;
347 break;
348 case MODE_CC:
349 arc_mode_class[i] = 1 << (int) C_MODE;
350 break;
351 default:
352 arc_mode_class[i] = 0;
353 break;
357 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
359 if (i < 60)
360 arc_regno_reg_class[i] = GENERAL_REGS;
361 else if (i == 60)
362 arc_regno_reg_class[i] = LPCOUNT_REG;
363 else if (i == 61)
364 arc_regno_reg_class[i] = NO_REGS /* CC_REG: must be NO_REGS */;
365 else
366 arc_regno_reg_class[i] = NO_REGS;
370 /* ARC specific attribute support.
372 The ARC has these attributes:
373 interrupt - for interrupt functions
376 /* Handle an "interrupt" attribute; arguments as in
377 struct attribute_spec.handler. */
378 static tree
379 arc_handle_interrupt_attribute (tree *node ATTRIBUTE_UNUSED,
380 tree name,
381 tree args,
382 int flags ATTRIBUTE_UNUSED,
383 bool *no_add_attrs)
385 tree value = TREE_VALUE (args);
387 if (TREE_CODE (value) != STRING_CST)
389 warning (OPT_Wattributes,
390 "argument of %qE attribute is not a string constant",
391 name);
392 *no_add_attrs = true;
394 else if (strcmp (TREE_STRING_POINTER (value), "ilink1")
395 && strcmp (TREE_STRING_POINTER (value), "ilink2"))
397 warning (OPT_Wattributes,
398 "argument of %qE attribute is not \"ilink1\" or \"ilink2\"",
399 name);
400 *no_add_attrs = true;
403 return NULL_TREE;
407 /* Acceptable arguments to the call insn. */
410 call_address_operand (rtx op, enum machine_mode mode)
412 return (symbolic_operand (op, mode)
413 || (GET_CODE (op) == CONST_INT && LEGITIMATE_CONSTANT_P (op))
414 || (GET_CODE (op) == REG));
418 call_operand (rtx op, enum machine_mode mode)
420 if (GET_CODE (op) != MEM)
421 return 0;
422 op = XEXP (op, 0);
423 return call_address_operand (op, mode);
426 /* Returns 1 if OP is a symbol reference. */
429 symbolic_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
431 switch (GET_CODE (op))
433 case SYMBOL_REF:
434 case LABEL_REF:
435 case CONST :
436 return 1;
437 default:
438 return 0;
442 /* Return truth value of statement that OP is a symbolic memory
443 operand of mode MODE. */
446 symbolic_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
448 if (GET_CODE (op) == SUBREG)
449 op = SUBREG_REG (op);
450 if (GET_CODE (op) != MEM)
451 return 0;
452 op = XEXP (op, 0);
453 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST
454 || GET_CODE (op) == LABEL_REF);
457 /* Return true if OP is a short immediate (shimm) value. */
460 short_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
462 if (GET_CODE (op) != CONST_INT)
463 return 0;
464 return SMALL_INT (INTVAL (op));
467 /* Return true if OP will require a long immediate (limm) value.
468 This is currently only used when calculating length attributes. */
471 long_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
473 switch (GET_CODE (op))
475 case SYMBOL_REF :
476 case LABEL_REF :
477 case CONST :
478 return 1;
479 case CONST_INT :
480 return !SMALL_INT (INTVAL (op));
481 case CONST_DOUBLE :
482 /* These can happen because large unsigned 32-bit constants are
483 represented this way (the multiplication patterns can cause these
484 to be generated). They also occur for SFmode values. */
485 return 1;
486 default:
487 break;
489 return 0;
492 /* Return true if OP is a MEM that when used as a load or store address will
493 require an 8 byte insn.
494 Load and store instructions don't allow the same possibilities but they're
495 similar enough that this one function will do.
496 This is currently only used when calculating length attributes. */
499 long_immediate_loadstore_operand (rtx op,
500 enum machine_mode mode ATTRIBUTE_UNUSED)
502 if (GET_CODE (op) != MEM)
503 return 0;
505 op = XEXP (op, 0);
506 switch (GET_CODE (op))
508 case SYMBOL_REF :
509 case LABEL_REF :
510 case CONST :
511 return 1;
512 case CONST_INT :
513 /* This must be handled as "st c,[limm]". Ditto for load.
514 Technically, the assembler could translate some possibilities to
515 "st c,[limm/2 + limm/2]" if limm/2 will fit in a shimm, but we don't
516 assume that it does. */
517 return 1;
518 case CONST_DOUBLE :
519 /* These can happen because large unsigned 32-bit constants are
520 represented this way (the multiplication patterns can cause these
521 to be generated). They also occur for SFmode values. */
522 return 1;
523 case REG :
524 return 0;
525 case PLUS :
526 if (GET_CODE (XEXP (op, 1)) == CONST_INT
527 && !SMALL_INT (INTVAL (XEXP (op, 1))))
528 return 1;
529 return 0;
530 default:
531 break;
533 return 0;
536 /* Return true if OP is an acceptable argument for a single word
537 move source. */
540 move_src_operand (rtx op, enum machine_mode mode)
542 switch (GET_CODE (op))
544 case SYMBOL_REF :
545 case LABEL_REF :
546 case CONST :
547 return 1;
548 case CONST_INT :
549 return (LARGE_INT (INTVAL (op)));
550 case CONST_DOUBLE :
551 /* We can handle DImode integer constants in SImode if the value
552 (signed or unsigned) will fit in 32 bits. This is needed because
553 large unsigned 32-bit constants are represented as CONST_DOUBLEs. */
554 if (mode == SImode)
555 return arc_double_limm_p (op);
556 /* We can handle 32-bit floating point constants. */
557 if (mode == SFmode)
558 return GET_MODE (op) == SFmode;
559 return 0;
560 case REG :
561 return register_operand (op, mode);
562 case SUBREG :
563 /* (subreg (mem ...) ...) can occur here if the inner part was once a
564 pseudo-reg and is now a stack slot. */
565 if (GET_CODE (SUBREG_REG (op)) == MEM)
566 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
567 else
568 return register_operand (op, mode);
569 case MEM :
570 return address_operand (XEXP (op, 0), mode);
571 default :
572 return 0;
576 /* Return true if OP is an acceptable argument for a double word
577 move source. */
580 move_double_src_operand (rtx op, enum machine_mode mode)
582 switch (GET_CODE (op))
584 case REG :
585 return register_operand (op, mode);
586 case SUBREG :
587 /* (subreg (mem ...) ...) can occur here if the inner part was once a
588 pseudo-reg and is now a stack slot. */
589 if (GET_CODE (SUBREG_REG (op)) == MEM)
590 return move_double_src_operand (SUBREG_REG (op), mode);
591 else
592 return register_operand (op, mode);
593 case MEM :
594 /* Disallow auto inc/dec for now. */
595 if (GET_CODE (XEXP (op, 0)) == PRE_DEC
596 || GET_CODE (XEXP (op, 0)) == PRE_INC)
597 return 0;
598 return address_operand (XEXP (op, 0), mode);
599 case CONST_INT :
600 case CONST_DOUBLE :
601 return 1;
602 default :
603 return 0;
607 /* Return true if OP is an acceptable argument for a move destination. */
610 move_dest_operand (rtx op, enum machine_mode mode)
612 switch (GET_CODE (op))
614 case REG :
615 return register_operand (op, mode);
616 case SUBREG :
617 /* (subreg (mem ...) ...) can occur here if the inner part was once a
618 pseudo-reg and is now a stack slot. */
619 if (GET_CODE (SUBREG_REG (op)) == MEM)
620 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
621 else
622 return register_operand (op, mode);
623 case MEM :
624 return address_operand (XEXP (op, 0), mode);
625 default :
626 return 0;
630 /* Return true if OP is a valid load-with-update operand. */
633 load_update_operand (rtx op, enum machine_mode mode)
635 if (GET_CODE (op) != MEM
636 || GET_MODE (op) != mode)
637 return 0;
638 op = XEXP (op, 0);
639 if (GET_CODE (op) != PLUS
640 || GET_MODE (op) != Pmode
641 || !register_operand (XEXP (op, 0), Pmode)
642 || !nonmemory_operand (XEXP (op, 1), Pmode))
643 return 0;
644 return 1;
650 /* Return true if OP is a valid store-with-update operand. */
650 store_update_operand (rtx op, enum machine_mode mode)
652 if (GET_CODE (op) != MEM
653 || GET_MODE (op) != mode)
654 return 0;
655 op = XEXP (op, 0);
656 if (GET_CODE (op) != PLUS
657 || GET_MODE (op) != Pmode
658 || !register_operand (XEXP (op, 0), Pmode)
659 || !(GET_CODE (XEXP (op, 1)) == CONST_INT
660 && SMALL_INT (INTVAL (XEXP (op, 1)))))
661 return 0;
662 return 1;
665 /* Return true if OP is a non-volatile non-immediate operand.
666 Volatile memory refs require a special "cache-bypass" instruction
667 and only the standard movXX patterns are set up to handle them. */
670 nonvol_nonimm_operand (rtx op, enum machine_mode mode)
672 if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
673 return 0;
674 return nonimmediate_operand (op, mode);
677 /* Accept integer operands in the range -0x80000000..0x7fffffff. We have
678 to check the range carefully since this predicate is used in DImode
679 contexts. */
682 const_sint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
684 /* All allowed constants will fit a CONST_INT. */
685 return (GET_CODE (op) == CONST_INT
686 && (INTVAL (op) >= (-0x7fffffff - 1) && INTVAL (op) <= 0x7fffffff));
689 /* Accept integer operands in the range 0..0xffffffff. We have to check the
690 range carefully since this predicate is used in DImode contexts. Also, we
691 need some extra crud to make it work when hosted on 64-bit machines. */
694 const_uint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
696 #if HOST_BITS_PER_WIDE_INT > 32
697 /* All allowed constants will fit a CONST_INT. */
698 return (GET_CODE (op) == CONST_INT
699 && (INTVAL (op) >= 0 && INTVAL (op) <= 0xffffffffL));
700 #else
701 return ((GET_CODE (op) == CONST_INT && INTVAL (op) >= 0)
702 || (GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_HIGH (op) == 0));
703 #endif
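/* Example of the host-width issue: on a 64-bit host the value 0x80000000
   is a plain CONST_INT with a positive INTVAL and is accepted by the range
   check; on a 32-bit host the same value does not fit a signed CONST_INT
   and is expected to show up as a CONST_DOUBLE with a zero high word,
   which the second arm of the test accepts.  */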
706 /* Return 1 if OP is a comparison operator valid for the mode of CC.
707 This allows the use of MATCH_OPERATOR to recognize all the branch insns.
709 Some insns only set a few bits in the condition code. So only allow those
710 comparisons that use the bits that are valid. */
713 proper_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
715 enum rtx_code code;
716 if (!COMPARISON_P (op))
717 return 0;
719 code = GET_CODE (op);
720 if (GET_MODE (XEXP (op, 0)) == CCZNmode)
721 return (code == EQ || code == NE);
722 if (GET_MODE (XEXP (op, 0)) == CCZNCmode)
723 return (code == EQ || code == NE
724 || code == LTU || code == GEU || code == GTU || code == LEU);
725 return 1;
728 /* Misc. utilities. */
730 /* X and Y are two things to compare using CODE. Return the rtx
731 for the cc reg in the proper mode. */
734 gen_compare_reg (enum rtx_code code, rtx x, rtx y)
736 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
737 return gen_rtx_REG (mode, 61);
740 /* Return 1 if VALUE, a const_double, will fit in a limm (4 byte number).
741 We assume the value can be either signed or unsigned. */
744 arc_double_limm_p (rtx value)
746 HOST_WIDE_INT low, high;
748 gcc_assert (GET_CODE (value) == CONST_DOUBLE);
750 low = CONST_DOUBLE_LOW (value);
751 high = CONST_DOUBLE_HIGH (value);
753 if (low & 0x80000000)
755 return (((unsigned HOST_WIDE_INT) low <= 0xffffffff && high == 0)
756 || (((low & - (unsigned HOST_WIDE_INT) 0x80000000)
757 == - (unsigned HOST_WIDE_INT) 0x80000000)
758 && high == -1));
760 else
762 return (unsigned HOST_WIDE_INT) low <= 0x7fffffff && high == 0;
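/* Worked examples for arc_double_limm_p: (high, low) == (0, 0xffffffff)
   is accepted as an unsigned 32-bit value; a pair whose top 33 bits are
   all ones, such as (high, low) == (-1, -2), is accepted as a
   sign-extended 32-bit value; (high, low) == (1, 0) needs more than 32
   bits and is rejected.  */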
766 /* Do any needed setup for a variadic function. For the ARC, we must
767 create a register parameter block, and then copy any anonymous arguments
768 in registers to memory.
770 CUM has not been updated for the last named argument which has type TYPE
771 and mode MODE, and we rely on this fact.
773 We do things a little weird here. We're supposed to only allocate space
774 for the anonymous arguments. However we need to keep the stack eight byte
775 aligned. So we round the space up if necessary, and leave it to va_start
776 to compensate. */
778 static void
779 arc_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
780 enum machine_mode mode,
781 tree type ATTRIBUTE_UNUSED,
782 int *pretend_size,
783 int no_rtl)
785 int first_anon_arg;
787 /* All BLKmode values are passed by reference. */
788 gcc_assert (mode != BLKmode);
790 first_anon_arg = *cum + ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1)
791 / UNITS_PER_WORD);
793 if (first_anon_arg < MAX_ARC_PARM_REGS && !no_rtl)
795 /* Note that first_reg_offset < MAX_ARC_PARM_REGS. */
796 int first_reg_offset = first_anon_arg;
797 /* Size in words to "pretend" allocate. */
798 int size = MAX_ARC_PARM_REGS - first_reg_offset;
799 /* Extra slop to keep stack eight byte aligned. */
800 int align_slop = size & 1;
801 rtx regblock;
803 regblock = gen_rtx_MEM (BLKmode,
804 plus_constant (arg_pointer_rtx,
805 FIRST_PARM_OFFSET (0)
806 + align_slop * UNITS_PER_WORD));
807 set_mem_alias_set (regblock, get_varargs_alias_set ());
808 set_mem_align (regblock, BITS_PER_WORD);
809 move_block_from_reg (first_reg_offset, regblock,
810 MAX_ARC_PARM_REGS - first_reg_offset);
812 *pretend_size = ((MAX_ARC_PARM_REGS - first_reg_offset + align_slop)
813 * UNITS_PER_WORD);
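/* Illustrative example of the slop calculation, assuming MAX_ARC_PARM_REGS
   is 8 (its value is not visible in this file): with three named argument
   words, first_reg_offset == 3, size == 5 anonymous words and
   align_slop == 1, so *pretend_size becomes 6 words (24 bytes with the
   ARC's 4 byte words) and the register block starts one word above
   FIRST_PARM_OFFSET, keeping the stack eight byte aligned.  */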
817 /* Cost functions. */
819 /* Compute a (partial) cost for rtx X. Return true if the complete
820 cost has been computed, and false if subexpressions should be
821 scanned. In either case, *TOTAL contains the cost result. */
823 static bool
824 arc_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total,
825 bool speed ATTRIBUTE_UNUSED)
827 switch (code)
829 /* Small integers are as cheap as registers. 4 byte values can
830 be fetched as immediate constants - let's give that the cost
831 of an extra insn. */
832 case CONST_INT:
833 if (SMALL_INT (INTVAL (x)))
835 *total = 0;
836 return true;
838 /* FALLTHRU */
840 case CONST:
841 case LABEL_REF:
842 case SYMBOL_REF:
843 *total = COSTS_N_INSNS (1);
844 return true;
846 case CONST_DOUBLE:
848 rtx high, low;
849 split_double (x, &high, &low);
850 *total = COSTS_N_INSNS (!SMALL_INT (INTVAL (high))
851 + !SMALL_INT (INTVAL (low)));
852 return true;
855 /* Encourage synth_mult to find a synthetic multiply when reasonable.
856 If we need more than 12 insns to do a multiply, then go out-of-line,
857 since the call overhead will be < 10% of the cost of the multiply. */
858 case ASHIFT:
859 case ASHIFTRT:
860 case LSHIFTRT:
861 if (TARGET_SHIFTER)
862 *total = COSTS_N_INSNS (1);
863 else if (GET_CODE (XEXP (x, 1)) != CONST_INT)
864 *total = COSTS_N_INSNS (16);
865 else
866 *total = COSTS_N_INSNS (INTVAL (XEXP ((x), 1)));
867 return false;
869 default:
870 return false;
875 /* Provide the costs of an addressing mode that contains ADDR.
876 If ADDR is not a valid address, its cost is irrelevant. */
878 static int
879 arc_address_cost (rtx addr, bool speed ATTRIBUTE_UNUSED)
881 switch (GET_CODE (addr))
883 case REG :
884 return 1;
886 case LABEL_REF :
887 case SYMBOL_REF :
888 case CONST :
889 return 2;
891 case PLUS :
893 register rtx plus0 = XEXP (addr, 0);
894 register rtx plus1 = XEXP (addr, 1);
896 if (GET_CODE (plus0) != REG)
897 break;
899 switch (GET_CODE (plus1))
901 case CONST_INT :
902 return SMALL_INT (INTVAL (plus1)) ? 1 : 2;
903 case CONST :
904 case SYMBOL_REF :
905 case LABEL_REF :
906 return 2;
907 default:
908 break;
910 break;
912 default:
913 break;
916 return 4;
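/* Summarizing the cases above: a bare register costs 1, a symbolic address
   costs 2, register plus a small (shimm) constant costs 1, register plus a
   large constant or a symbol costs 2, and anything else falls through to
   the default cost of 4.  */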
919 /* Function prologue/epilogue handlers. */
921 /* ARC stack frames look like:
923               Before call                       After call
924         +-----------------------+        +-----------------------+
925         |                       |        |                       |
926    high |  local variables,     |        |  local variables,     |
927    mem  |  reg save area, etc.  |        |  reg save area, etc.  |
928         |                       |        |                       |
929         +-----------------------+        +-----------------------+
930         |                       |        |                       |
931         |  arguments on stack.  |        |  arguments on stack.  |
932         |                       |        |                       |
933  SP+16->+-----------------------+ FP+48->+-----------------------+
934         |  4 word save area for |        |  reg parm save area,  |
935         |  return addr, prev %fp|        |  only created for     |
936   SP+0->+-----------------------+        |  variable argument    |
937                                          |  functions            |
938                                   FP+16->+-----------------------+
939                                          |  4 word save area for |
940                                          |  return addr, prev %fp|
941                                    FP+0->+-----------------------+
942                                          |                       |
943                                          |  local variables      |
944                                          |                       |
945                                          +-----------------------+
946                                          |                       |
947                                          |  register save area   |
948                                          |                       |
949                                          +-----------------------+
950                                          |                       |
951                                          |  alloca allocations   |
952                                          |                       |
953                                          +-----------------------+
954                                          |                       |
955                                          |  arguments on stack   |
956                                          |                       |
957                                   SP+16->+-----------------------+
958    low                                   |  4 word save area for |
959   memory                                 |  return addr, prev %fp|
960                                    SP+0->+-----------------------+
962 Notes:
963 1) The "reg parm save area" does not exist for non variable argument fns.
964 The "reg parm save area" can be eliminated completely if we created our
965 own va-arc.h, but that has tradeoffs as well (so it's not done). */
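/* The SP+16 and FP+16 figures in the diagram above are the 4 word (16 byte)
   return address / previous %fp save area.  The FP+48 figure additionally
   assumes an 8 register parameter save area (8 * 4 bytes) for variadic
   functions; that register count is inferred from the offset rather than
   stated in this file, and should match MAX_ARC_PARM_REGS as used in
   arc_setup_incoming_varargs above.  */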
967 /* Structure to be filled in by arc_compute_frame_size with register
968 save masks, and offsets for the current function. */
969 struct arc_frame_info
971 unsigned int total_size; /* # bytes that the entire frame takes up. */
972 unsigned int extra_size; /* # bytes of extra stuff. */
973 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
974 unsigned int args_size; /* # bytes that outgoing arguments take up. */
975 unsigned int reg_size; /* # bytes needed to store regs. */
976 unsigned int var_size; /* # bytes that variables take up. */
977 unsigned int reg_offset; /* Offset from new sp to store regs. */
978 unsigned int gmask; /* Mask of saved gp registers. */
979 int initialized; /* Nonzero if frame size already calculated. */
982 /* Current frame information calculated by arc_compute_frame_size. */
983 static struct arc_frame_info current_frame_info;
985 /* Zero structure to initialize current_frame_info. */
986 static struct arc_frame_info zero_frame_info;
988 /* Type of function DECL.
990 The result is cached. To reset the cache at the end of a function,
991 call with DECL = NULL_TREE. */
993 enum arc_function_type
994 arc_compute_function_type (tree decl)
996 tree a;
997 /* Cached value. */
998 static enum arc_function_type fn_type = ARC_FUNCTION_UNKNOWN;
999 /* Last function we were called for. */
1000 static tree last_fn = NULL_TREE;
1002 /* Resetting the cached value? */
1003 if (decl == NULL_TREE)
1005 fn_type = ARC_FUNCTION_UNKNOWN;
1006 last_fn = NULL_TREE;
1007 return fn_type;
1010 if (decl == last_fn && fn_type != ARC_FUNCTION_UNKNOWN)
1011 return fn_type;
1013 /* Assume we have a normal function (not an interrupt handler). */
1014 fn_type = ARC_FUNCTION_NORMAL;
1016 /* Now see if this is an interrupt handler. */
1017 for (a = DECL_ATTRIBUTES (current_function_decl);
1019 a = TREE_CHAIN (a))
1021 tree name = TREE_PURPOSE (a), args = TREE_VALUE (a);
1023 if (name == get_identifier ("__interrupt__")
1024 && list_length (args) == 1
1025 && TREE_CODE (TREE_VALUE (args)) == STRING_CST)
1027 tree value = TREE_VALUE (args);
1029 if (!strcmp (TREE_STRING_POINTER (value), "ilink1"))
1030 fn_type = ARC_FUNCTION_ILINK1;
1031 else if (!strcmp (TREE_STRING_POINTER (value), "ilink2"))
1032 fn_type = ARC_FUNCTION_ILINK2;
1033 else
1034 gcc_unreachable ();
1035 break;
1039 last_fn = decl;
1040 return fn_type;
1043 #define ILINK1_REGNUM 29
1044 #define ILINK2_REGNUM 30
1045 #define RETURN_ADDR_REGNUM 31
1046 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1047 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1049 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1050 The return address and frame pointer are treated separately.
1051 Don't consider them here. */
1052 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1053 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1054 && (df_regs_ever_live_p (regno) && (!call_used_regs[regno] || interrupt_p)))
1056 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM))
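/* In words: a register must be saved if it is ever live in this function
   and is either call-saved, or call-used while we are compiling an
   interrupt handler (interrupt handlers must preserve call-used registers
   too, since the interrupted code does not expect them to change).  The
   return address and frame pointer are excluded because the prologue and
   epilogue code below handles them separately.  */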
1058 /* Return the bytes needed to compute the frame pointer from the current
1059 stack pointer.
1061 SIZE is the size needed for local variables. */
1063 unsigned int
1064 arc_compute_frame_size (int size /* # of var. bytes allocated. */)
1066 int regno;
1067 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1068 unsigned int reg_size, reg_offset;
1069 unsigned int gmask;
1070 enum arc_function_type fn_type;
1071 int interrupt_p;
1073 var_size = size;
1074 args_size = crtl->outgoing_args_size;
1075 pretend_size = crtl->args.pretend_args_size;
1076 extra_size = FIRST_PARM_OFFSET (0);
1077 total_size = extra_size + pretend_size + args_size + var_size;
1078 reg_offset = FIRST_PARM_OFFSET(0) + crtl->outgoing_args_size;
1079 reg_size = 0;
1080 gmask = 0;
1082 /* See if this is an interrupt handler. Call used registers must be saved
1083 for them too. */
1084 fn_type = arc_compute_function_type (current_function_decl);
1085 interrupt_p = ARC_INTERRUPT_P (fn_type);
1087 /* Calculate space needed for registers.
1088 ??? We ignore the extension registers for now. */
1090 for (regno = 0; regno <= 31; regno++)
1092 if (MUST_SAVE_REGISTER (regno, interrupt_p))
1094 reg_size += UNITS_PER_WORD;
1095 gmask |= 1 << regno;
1099 total_size += reg_size;
1101 /* If the only space to allocate is the fp/blink save area this is an
1102 empty frame. However, if we'll be making a function call we need to
1103 allocate a stack frame for our callee's fp/blink save area. */
1104 if (total_size == extra_size
1105 && !MUST_SAVE_RETURN_ADDR)
1106 total_size = extra_size = 0;
1108 total_size = ARC_STACK_ALIGN (total_size);
1110 /* Save computed information. */
1111 current_frame_info.total_size = total_size;
1112 current_frame_info.extra_size = extra_size;
1113 current_frame_info.pretend_size = pretend_size;
1114 current_frame_info.var_size = var_size;
1115 current_frame_info.args_size = args_size;
1116 current_frame_info.reg_size = reg_size;
1117 current_frame_info.reg_offset = reg_offset;
1118 current_frame_info.gmask = gmask;
1119 current_frame_info.initialized = reload_completed;
1121 /* Ok, we're done. */
1122 return total_size;
1125 /* Common code to save/restore registers. */
1127 void
1128 arc_save_restore (FILE *file,
1129 const char *base_reg,
1130 unsigned int offset,
1131 unsigned int gmask,
1132 const char *op)
1134 int regno;
1136 if (gmask == 0)
1137 return;
1139 for (regno = 0; regno <= 31; regno++)
1141 if ((gmask & (1L << regno)) != 0)
1143 fprintf (file, "\t%s %s,[%s,%d]\n",
1144 op, reg_names[regno], base_reg, offset);
1145 offset += UNITS_PER_WORD;
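/* For example, with BASE_REG "sp", OFFSET 4, OP "st" and a GMASK whose
   bits 13 and 14 are set, the loop above would emit something like

        st r13,[sp,4]
        st r14,[sp,8]

   one store (or load, for OP "ld") per set bit, at consecutive word
   offsets.  The register names shown assume the usual "rN" reg_names.  */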
1150 /* Target hook to assemble an integer object. The ARC version needs to
1151 emit a special directive for references to labels and function
1152 symbols. */
1154 static bool
1155 arc_assemble_integer (rtx x, unsigned int size, int aligned_p)
1157 if (size == UNITS_PER_WORD && aligned_p
1158 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
1159 || GET_CODE (x) == LABEL_REF))
1161 fputs ("\t.word\t%st(", asm_out_file);
1162 output_addr_const (asm_out_file, x);
1163 fputs (")\n", asm_out_file);
1164 return true;
1166 return default_assemble_integer (x, size, aligned_p);
1169 /* Set up the stack and frame pointer (if desired) for the function. */
1171 static void
1172 arc_output_function_prologue (FILE *file, HOST_WIDE_INT size)
1174 const char *sp_str = reg_names[STACK_POINTER_REGNUM];
1175 const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1176 unsigned int gmask = current_frame_info.gmask;
1177 enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1179 /* If this is an interrupt handler, set up our stack frame.
1180 ??? Optimize later. */
1181 if (ARC_INTERRUPT_P (fn_type))
1183 fprintf (file, "\t%s interrupt handler\n",
1184 ASM_COMMENT_START);
1185 fprintf (file, "\tsub %s,%s,16\n", sp_str, sp_str);
1188 /* This is only for the human reader. */
1189 fprintf (file, "\t%s BEGIN PROLOGUE %s vars= %d, regs= %d, args= %d, extra= %d\n",
1190 ASM_COMMENT_START, ASM_COMMENT_START,
1191 current_frame_info.var_size,
1192 current_frame_info.reg_size / 4,
1193 current_frame_info.args_size,
1194 current_frame_info.extra_size);
1196 size = ARC_STACK_ALIGN (size);
1197 size = (! current_frame_info.initialized
1198 ? arc_compute_frame_size (size)
1199 : current_frame_info.total_size);
1201 /* These cases shouldn't happen. Catch them now. */
1202 gcc_assert (size || !gmask);
1204 /* Allocate space for register arguments if this is a variadic function. */
1205 if (current_frame_info.pretend_size != 0)
1206 fprintf (file, "\tsub %s,%s,%d\n",
1207 sp_str, sp_str, current_frame_info.pretend_size);
1209 /* The home-grown ABI says the link register is saved first. */
1210 if (MUST_SAVE_RETURN_ADDR)
1211 fprintf (file, "\tst %s,[%s,%d]\n",
1212 reg_names[RETURN_ADDR_REGNUM], sp_str, UNITS_PER_WORD);
1214 /* Set up the previous frame pointer next (if we need to). */
1215 if (frame_pointer_needed)
1217 fprintf (file, "\tst %s,[%s]\n", fp_str, sp_str);
1218 fprintf (file, "\tmov %s,%s\n", fp_str, sp_str);
1221 /* ??? We don't handle the case where the saved regs are more than 252
1222 bytes away from sp. This can be handled by decrementing sp once, saving
1223 the regs, and then decrementing it again. The epilogue doesn't have this
1224 problem as the `ld' insn takes reg+limm values (though it would be more
1225 efficient to avoid reg+limm). */
1227 /* Allocate the stack frame. */
1228 if (size - current_frame_info.pretend_size > 0)
1229 fprintf (file, "\tsub %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
1230 sp_str, sp_str, size - current_frame_info.pretend_size);
1232 /* Save any needed call-saved regs (and call-used if this is an
1233 interrupt handler). */
1234 arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1235 /* The zeroing of these two bits is unnecessary,
1236 but leave this in for clarity. */
1237 gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1238 "st");
1240 fprintf (file, "\t%s END PROLOGUE\n", ASM_COMMENT_START);
1243 /* Do any necessary cleanup after a function to restore stack, frame,
1244 and regs. */
1246 static void
1247 arc_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
1249 rtx epilogue_delay = crtl->epilogue_delay_list;
1250 int noepilogue = FALSE;
1251 enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1253 /* This is only for the human reader. */
1254 fprintf (file, "\t%s EPILOGUE\n", ASM_COMMENT_START);
1256 size = ARC_STACK_ALIGN (size);
1257 size = (!current_frame_info.initialized
1258 ? arc_compute_frame_size (size)
1259 : current_frame_info.total_size);
1261 if (size == 0 && epilogue_delay == 0)
1263 rtx insn = get_last_insn ();
1265 /* If the last insn was a BARRIER, we don't have to write any code
1266 because a jump (aka return) was put there. */
1267 if (GET_CODE (insn) == NOTE)
1268 insn = prev_nonnote_insn (insn);
1269 if (insn && GET_CODE (insn) == BARRIER)
1270 noepilogue = TRUE;
1273 if (!noepilogue)
1275 unsigned int pretend_size = current_frame_info.pretend_size;
1276 unsigned int frame_size = size - pretend_size;
1277 int restored, fp_restored_p;
1278 int can_trust_sp_p = !cfun->calls_alloca;
1279 const char *sp_str = reg_names[STACK_POINTER_REGNUM];
1280 const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1282 /* ??? There are lots of optimizations that can be done here.
1283 EG: Use fp to restore regs if it's closer.
1284 Maybe in time we'll do them all. For now, always restore regs from
1285 sp, but don't restore sp if we don't have to. */
1287 if (!can_trust_sp_p)
1289 gcc_assert (frame_pointer_needed);
1290 fprintf (file,"\tsub %s,%s,%d\t\t%s sp not trusted here\n",
1291 sp_str, fp_str, frame_size, ASM_COMMENT_START);
1294 /* Restore any saved registers. */
1295 arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1296 /* The zeroing of these two bits is unnecessary,
1297 but leave this in for clarity. */
1298 current_frame_info.gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1299 "ld");
1301 if (MUST_SAVE_RETURN_ADDR)
1302 fprintf (file, "\tld %s,[%s,%d]\n",
1303 reg_names[RETURN_ADDR_REGNUM],
1304 frame_pointer_needed ? fp_str : sp_str,
1305 UNITS_PER_WORD + (frame_pointer_needed ? 0 : frame_size));
1307 /* Keep track of how much of the stack pointer we've restored.
1308 It makes the following a lot more readable. */
1309 restored = 0;
1310 fp_restored_p = 0;
1312 /* We try to emit the epilogue delay slot insn right after the load
1313 of the return address register so that it can execute with the
1314 stack intact. Secondly, loads are delayed. */
1315 /* ??? If stack intactness is important, always emit now. */
1316 if (MUST_SAVE_RETURN_ADDR && epilogue_delay != NULL_RTX)
1318 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
1319 epilogue_delay = NULL_RTX;
1322 if (frame_pointer_needed)
1324 /* Try to restore the frame pointer in the delay slot. We can't,
1325 however, if any of these is true. */
1326 if (epilogue_delay != NULL_RTX
1327 || !SMALL_INT (frame_size)
1328 || pretend_size
1329 || ARC_INTERRUPT_P (fn_type))
1331 /* Note that we restore fp and sp here! */
1332 fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1333 restored += frame_size;
1334 fp_restored_p = 1;
1337 else if (!SMALL_INT (size /* frame_size + pretend_size */)
1338 || ARC_INTERRUPT_P (fn_type))
1340 fprintf (file, "\tadd %s,%s,%d\n", sp_str, sp_str, frame_size);
1341 restored += frame_size;
1344 /* These must be done before the return insn because the delay slot
1345 does the final stack restore. */
1346 if (ARC_INTERRUPT_P (fn_type))
1348 if (epilogue_delay)
1350 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
1354 /* Emit the return instruction. */
1356 static const int regs[4] = {
1357 0, RETURN_ADDR_REGNUM, ILINK1_REGNUM, ILINK2_REGNUM
1360 /* Update the flags, if returning from an interrupt handler. */
1361 if (ARC_INTERRUPT_P (fn_type))
1362 fprintf (file, "\tj.d.f %s\n", reg_names[regs[fn_type]]);
1363 else
1364 fprintf (file, "\tj.d %s\n", reg_names[regs[fn_type]]);
1367 /* If the only register saved is the return address, we need a
1368 nop, unless we have an instruction to put into it. Otherwise
1369 we don't since reloading multiple registers doesn't reference
1370 the register being loaded. */
1372 if (ARC_INTERRUPT_P (fn_type))
1373 fprintf (file, "\tadd %s,%s,16\n", sp_str, sp_str);
1374 else if (epilogue_delay != NULL_RTX)
1376 gcc_assert (!frame_pointer_needed || fp_restored_p);
1377 gcc_assert (restored >= size);
1378 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
1380 else if (frame_pointer_needed && !fp_restored_p)
1382 gcc_assert (SMALL_INT (frame_size));
1383 /* Note that we restore fp and sp here! */
1384 fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1386 else if (restored < size)
1388 gcc_assert (SMALL_INT (size - restored));
1389 fprintf (file, "\tadd %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
1390 sp_str, sp_str, size - restored);
1392 else
1393 fprintf (file, "\tnop\n");
1396 /* Reset state info for each function. */
1397 current_frame_info = zero_frame_info;
1398 arc_compute_function_type (NULL_TREE);
1401 /* Define the number of delay slots needed for the function epilogue.
1403 Interrupt handlers can't have any epilogue delay slots (it's always needed
1404 for something else, I think). For normal functions, we have to worry about
1405 using call-saved regs as they'll be restored before the delay slot insn.
1406 Functions with non-empty frames already have enough choices for the epilogue
1407 delay slot so for now we only consider functions with empty frames. */
1410 arc_delay_slots_for_epilogue (void)
1412 if (arc_compute_function_type (current_function_decl) != ARC_FUNCTION_NORMAL)
1413 return 0;
1414 if (!current_frame_info.initialized)
1415 (void) arc_compute_frame_size (get_frame_size ());
1416 if (current_frame_info.total_size == 0)
1417 return 1;
1418 return 0;
1421 /* Return true if TRIAL is a valid insn for the epilogue delay slot.
1422 Any single length instruction which doesn't reference the stack or frame
1423 pointer or any call-saved register is OK. SLOT will always be 0. */
1426 arc_eligible_for_epilogue_delay (rtx trial, int slot)
1428 gcc_assert (!slot);
1430 if (get_attr_length (trial) == 1
1431 /* If registers were saved, presumably there are more than enough
1432 possibilities for the delay slot. The alternative is something
1433 more complicated (of course, if we expanded the epilogue as rtl
1434 this problem would go away). */
1435 /* ??? Note that this will always be true since only functions with
1436 empty frames have epilogue delay slots. See
1437 arc_delay_slots_for_epilogue. */
1438 && current_frame_info.gmask == 0
1439 && ! reg_mentioned_p (stack_pointer_rtx, PATTERN (trial))
1440 && ! reg_mentioned_p (frame_pointer_rtx, PATTERN (trial)))
1441 return 1;
1442 return 0;
1445 /* Return true if OP is a shift operator. */
1448 shift_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1450 switch (GET_CODE (op))
1452 case ASHIFTRT:
1453 case LSHIFTRT:
1454 case ASHIFT:
1455 return 1;
1456 default:
1457 return 0;
1461 /* Output the assembler code for doing a shift.
1462 We go to a bit of trouble to generate efficient code as the ARC only has
1463 single bit shifts. This is taken from the h8300 port. We only have one
1464 mode of shifting and can't access individual bytes like the h8300 can, so
1465 this is greatly simplified (at the expense of not generating hyper-
1466 efficient code).
1468 This function is not used if the variable shift insns are present. */
1470 /* ??? We assume the output operand is the same as operand 1.
1471 This can be optimized (deleted) in the case of 1 bit shifts. */
1472 /* ??? We use the loop register here. We don't use it elsewhere (yet) and
1473 using it here will give us a chance to play with it. */
1475 const char *
1476 output_shift (rtx *operands)
1478 rtx shift = operands[3];
1479 enum machine_mode mode = GET_MODE (shift);
1480 enum rtx_code code = GET_CODE (shift);
1481 const char *shift_one;
1483 gcc_assert (mode == SImode);
1485 switch (code)
1487 case ASHIFT: shift_one = "asl %0,%0"; break;
1488 case ASHIFTRT: shift_one = "asr %0,%0"; break;
1489 case LSHIFTRT: shift_one = "lsr %0,%0"; break;
1490 default: gcc_unreachable ();
1493 if (GET_CODE (operands[2]) != CONST_INT)
1495 if (optimize)
1497 output_asm_insn ("sub.f 0,%2,0", operands);
1498 output_asm_insn ("mov lp_count,%2", operands);
1499 output_asm_insn ("bz 2f", operands);
1501 else
1502 output_asm_insn ("mov %4,%2", operands);
1503 goto shiftloop;
1505 else
1507 int n;
1509 /* If the count is negative, make it 0. */
1510 n = INTVAL (operands[2]);
1511 if (n < 0)
1512 n = 0;
1513 /* If the count is too big, truncate it.
1514 ANSI says shifts of GET_MODE_BITSIZE are undefined - we choose to
1515 do the intuitive thing. */
1516 else if (n > GET_MODE_BITSIZE (mode))
1517 n = GET_MODE_BITSIZE (mode);
1519 /* First see if we can do them inline. */
1520 if (n <= 8)
1522 while (--n >= 0)
1523 output_asm_insn (shift_one, operands);
1525 /* See if we can use a rotate/and. */
1526 else if (n == BITS_PER_WORD - 1)
1528 switch (code)
1530 case ASHIFT :
1531 output_asm_insn ("and %0,%0,1\n\tror %0,%0", operands);
1532 break;
1533 case ASHIFTRT :
1534 /* The ARC doesn't have a rol insn. Use something else. */
1535 output_asm_insn ("asl.f 0,%0\n\tsbc %0,0,0", operands);
1536 break;
1537 case LSHIFTRT :
1538 /* The ARC doesn't have a rol insn. Use something else. */
1539 output_asm_insn ("asl.f 0,%0\n\tadc %0,0,0", operands);
1540 break;
1541 default:
1542 break;
1545 /* Must loop. */
1546 else
1548 char buf[100];
1550 if (optimize)
1551 output_asm_insn ("mov lp_count,%c2", operands);
1552 else
1553 output_asm_insn ("mov %4,%c2", operands);
1554 shiftloop:
1555 if (optimize)
1557 if (flag_pic)
1558 sprintf (buf, "lr %%4,[status]\n\tadd %%4,%%4,6\t%s single insn loop start",
1559 ASM_COMMENT_START);
1560 else
1561 sprintf (buf, "mov %%4,%%%%st(1f)\t%s (single insn loop start) >> 2",
1562 ASM_COMMENT_START);
1563 output_asm_insn (buf, operands);
1564 output_asm_insn ("sr %4,[lp_start]", operands);
1565 output_asm_insn ("add %4,%4,1", operands);
1566 output_asm_insn ("sr %4,[lp_end]", operands);
1567 output_asm_insn ("nop\n\tnop", operands);
1568 if (flag_pic)
1569 fprintf (asm_out_file, "\t%s single insn loop\n",
1570 ASM_COMMENT_START);
1571 else
1572 fprintf (asm_out_file, "1:\t%s single insn loop\n",
1573 ASM_COMMENT_START);
1574 output_asm_insn (shift_one, operands);
1575 fprintf (asm_out_file, "2:\t%s end single insn loop\n",
1576 ASM_COMMENT_START);
1578 else
1580 fprintf (asm_out_file, "1:\t%s begin shift loop\n",
1581 ASM_COMMENT_START);
1582 output_asm_insn ("sub.f %4,%4,1", operands);
1583 output_asm_insn ("nop", operands);
1584 output_asm_insn ("bn.nd 2f", operands);
1585 output_asm_insn (shift_one, operands);
1586 output_asm_insn ("b.nd 1b", operands);
1587 fprintf (asm_out_file, "2:\t%s end shift loop\n",
1588 ASM_COMMENT_START);
1593 return "";
1596 /* Nested function support. */
1598 /* Emit RTL insns to initialize the variable parts of a trampoline.
1599 FNADDR is an RTX for the address of the function's pure code.
1600 CXT is an RTX for the static chain value for the function. */
1602 void
1603 arc_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
1604 rtx fnaddr ATTRIBUTE_UNUSED,
1605 rtx cxt ATTRIBUTE_UNUSED)
1609 /* Set the cpu type and print out other fancy things,
1610 at the top of the file. */
1612 static void
1613 arc_file_start (void)
1615 default_file_start ();
1616 fprintf (asm_out_file, "\t.cpu %s\n", arc_cpu_string);
1619 /* Print operand X (an rtx) in assembler syntax to file FILE.
1620 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
1621 For `%' followed by punctuation, CODE is the punctuation and X is null. */
1623 void
1624 arc_print_operand (FILE *file, rtx x, int code)
1626 switch (code)
1628 case '#' :
1629 /* Conditional branches. For now these are equivalent. */
1630 case '*' :
1631 /* Unconditional branches. Output the appropriate delay slot suffix. */
1632 if (!final_sequence || XVECLEN (final_sequence, 0) == 1)
1634 /* There's nothing in the delay slot. */
1635 fputs (".nd", file);
1637 else
1639 rtx jump = XVECEXP (final_sequence, 0, 0);
1640 rtx delay = XVECEXP (final_sequence, 0, 1);
1641 if (INSN_ANNULLED_BRANCH_P (jump))
1642 fputs (INSN_FROM_TARGET_P (delay) ? ".jd" : ".nd", file);
1643 else
1644 fputs (".d", file);
1646 return;
1647 case '?' : /* with leading "." */
1648 case '!' : /* without leading "." */
1649 /* This insn can be conditionally executed. See if the ccfsm machinery
1650 says it should be conditionalized. */
1651 if (arc_ccfsm_state == 3 || arc_ccfsm_state == 4)
1653 /* Is this insn in a delay slot? */
1654 if (final_sequence && XVECLEN (final_sequence, 0) == 2)
1656 rtx insn = XVECEXP (final_sequence, 0, 1);
1658 /* If the insn is annulled and is from the target path, we need
1659 to invert the condition test. */
1660 if (INSN_ANNULLED_BRANCH_P (insn))
1662 if (INSN_FROM_TARGET_P (insn))
1663 fprintf (file, "%s%s",
1664 code == '?' ? "." : "",
1665 arc_condition_codes[ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc)]);
1666 else
1667 fprintf (file, "%s%s",
1668 code == '?' ? "." : "",
1669 arc_condition_codes[arc_ccfsm_current_cc]);
1671 else
1673 /* This insn is executed for either path, so don't
1674 conditionalize it at all. */
1675 ; /* nothing to do */
1678 else
1680 /* This insn isn't in a delay slot. */
1681 fprintf (file, "%s%s",
1682 code == '?' ? "." : "",
1683 arc_condition_codes[arc_ccfsm_current_cc]);
1686 return;
1687 case '~' :
1688 /* Output a nop if we're between a set of the condition codes,
1689 and a conditional branch. */
1690 if (last_insn_set_cc_p)
1691 fputs ("nop\n\t", file);
1692 return;
1693 case 'd' :
1694 fputs (arc_condition_codes[get_arc_condition_code (x)], file);
1695 return;
1696 case 'D' :
1697 fputs (arc_condition_codes[ARC_INVERSE_CONDITION_CODE
1698 (get_arc_condition_code (x))],
1699 file);
1700 return;
1701 case 'R' :
1702 /* Write second word of DImode or DFmode reference,
1703 register or memory. */
1704 if (GET_CODE (x) == REG)
1705 fputs (reg_names[REGNO (x)+1], file);
1706 else if (GET_CODE (x) == MEM)
1708 fputc ('[', file);
1709 /* Handle possible auto-increment. Since it is pre-increment and
1710 we have already done it, we can just use an offset of four. */
1711 /* ??? This is taken from rs6000.c I think. I don't think it is
1712 currently necessary, but keep it around. */
1713 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1714 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1715 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
1716 else
1717 output_address (plus_constant (XEXP (x, 0), 4));
1718 fputc (']', file);
1720 else
1721 output_operand_lossage ("invalid operand to %%R code");
1722 return;
1723 case 'S' :
1724 if ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
1725 || GET_CODE (x) == LABEL_REF)
1727 fprintf (file, "%%st(");
1728 output_addr_const (file, x);
1729 fprintf (file, ")");
1730 return;
1732 break;
1733 case 'H' :
1734 case 'L' :
1735 if (GET_CODE (x) == REG)
1737 /* L = least significant word, H = most significant word */
1738 if ((TARGET_BIG_ENDIAN != 0) ^ (code == 'L'))
1739 fputs (reg_names[REGNO (x)], file);
1740 else
1741 fputs (reg_names[REGNO (x)+1], file);
1743 else if (GET_CODE (x) == CONST_INT
1744 || GET_CODE (x) == CONST_DOUBLE)
1746 rtx first, second;
1748 split_double (x, &first, &second);
1749 fprintf (file, "0x%08lx",
1750 (long)(code == 'L' ? INTVAL (first) : INTVAL (second)));
1752 else
1753 output_operand_lossage ("invalid operand to %%H/%%L code");
1754 return;
1755 case 'A' :
1757 char str[30];
1759 gcc_assert (GET_CODE (x) == CONST_DOUBLE
1760 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT);
1762 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
1763 fprintf (file, "%s", str);
1764 return;
1766 case 'U' :
1767 /* Output a load/store with update indicator if appropriate. */
1768 if (GET_CODE (x) == MEM)
1770 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1771 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1772 fputs (".a", file);
1774 else
1775 output_operand_lossage ("invalid operand to %%U code");
1776 return;
1777 case 'V' :
1778 /* Output cache bypass indicator for a load/store insn. Volatile memory
1779 refs are defined to use the cache bypass mechanism. */
1780 if (GET_CODE (x) == MEM)
1782 if (MEM_VOLATILE_P (x))
1783 fputs (".di", file);
1785 else
1786 output_operand_lossage ("invalid operand to %%V code");
1787 return;
1788 case 0 :
1789 /* Do nothing special. */
1790 break;
1791 default :
1792 /* Unknown flag. */
1793 output_operand_lossage ("invalid operand output code");
1796 switch (GET_CODE (x))
1798 case REG :
1799 fputs (reg_names[REGNO (x)], file);
1800 break;
1801 case MEM :
1802 fputc ('[', file);
1803 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
1804 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1805 GET_MODE_SIZE (GET_MODE (x))));
1806 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
1807 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1808 - GET_MODE_SIZE (GET_MODE (x))));
1809 else
1810 output_address (XEXP (x, 0));
1811 fputc (']', file);
1812 break;
1813 case CONST_DOUBLE :
1814 /* We handle SFmode constants here as output_addr_const doesn't. */
1815 if (GET_MODE (x) == SFmode)
1817 REAL_VALUE_TYPE d;
1818 long l;
1820 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1821 REAL_VALUE_TO_TARGET_SINGLE (d, l);
1822 fprintf (file, "0x%08lx", l);
1823 break;
1825 /* Fall through. Let output_addr_const deal with it. */
1826 default :
1827 output_addr_const (file, x);
1828 break;
1832 /* Print a memory address as an operand to reference that memory location. */
1834 void
1835 arc_print_operand_address (FILE *file, rtx addr)
1837 register rtx base, index = 0;
1838 int offset = 0;
1840 switch (GET_CODE (addr))
1842 case REG :
1843 fputs (reg_names[REGNO (addr)], file);
1844 break;
1845 case SYMBOL_REF :
1846 if (/*???*/ 0 && SYMBOL_REF_FUNCTION_P (addr))
1848 fprintf (file, "%%st(");
1849 output_addr_const (file, addr);
1850 fprintf (file, ")");
1852 else
1853 output_addr_const (file, addr);
1854 break;
1855 case PLUS :
1856 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
1857 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
1858 else if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
1859 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
1860 else
1861 base = XEXP (addr, 0), index = XEXP (addr, 1);
1862 gcc_assert (GET_CODE (base) == REG);
1863 fputs (reg_names[REGNO (base)], file);
1864 if (index == 0)
1866 if (offset != 0)
1867 fprintf (file, ",%d", offset);
1869 else
1871 switch (GET_CODE (index))
1873 case REG:
1874 fprintf (file, ",%s", reg_names[REGNO (index)]);
1875 break;
1876 case SYMBOL_REF:
1877 fputc (',', file), output_addr_const (file, index);
1878 break;
1879 default:
1880 gcc_unreachable ();
1883 break;
1884 case PRE_INC :
1885 case PRE_DEC :
1886 /* We shouldn't get here as we've lost the mode of the memory object
1887 (which says how much to inc/dec by). */
1888 gcc_unreachable ();
1889 break;
1890 default :
1891 output_addr_const (file, addr);
1892 break;
1896 /* Update compare/branch separation marker. */
1898 static void
1899 record_cc_ref (rtx insn)
1901 last_insn_set_cc_p = current_insn_set_cc_p;
1903 switch (get_attr_cond (insn))
1905 case COND_SET :
1906 case COND_SET_ZN :
1907 case COND_SET_ZNC :
1908 if (get_attr_length (insn) == 1)
1909 current_insn_set_cc_p = 1;
1910 else
1911 current_insn_set_cc_p = 0;
1912 break;
1913 default :
1914 current_insn_set_cc_p = 0;
1915 break;
1919 /* Conditional execution support.
1921 This is based on the ARM port but for now is much simpler.
1923 A finite state machine takes care of noticing whether or not instructions
1924 can be conditionally executed, and thus decrease execution time and code
1925 size by deleting branch instructions. The fsm is controlled by
1926 final_prescan_insn, and controls the actions of PRINT_OPERAND. The patterns
1927 in the .md file for the branch insns also have a hand in this. */
1929 /* The states of the fsm controlling condition codes are:
1930 0: normal, do nothing special
1931 1: don't output this insn
1932 2: don't output this insn
1933 3: make insns conditional
1934 4: make insns conditional
1936 State transitions (state->state by whom, under what condition):
1937 0 -> 1 final_prescan_insn, if insn is conditional branch
1938 0 -> 2 final_prescan_insn, if the `target' is an unconditional branch
1939 1 -> 3 branch patterns, after having not output the conditional branch
1940 2 -> 4 branch patterns, after having not output the conditional branch
1941 3 -> 0 (*targetm.asm_out.internal_label), if the `target' label is reached
1942 (the target label has CODE_LABEL_NUMBER equal to
1943 arc_ccfsm_target_label).
1944 4 -> 0 final_prescan_insn, if `target' unconditional branch is reached
1946 If the jump clobbers the conditions then we use states 2 and 4.
1948 A similar thing can be done with conditional return insns.
1950 We also handle separating branches from sets of the condition code.
1951 This is done here because knowledge of the ccfsm state is required,
1952 as we may not be outputting the branch. */
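/* Illustrative example (not from the original source; ARC condition-suffix
   syntax assumed).  For a fragment such as

        beq  .L1          ; conditional branch, state 0 -> 1 (then 1 -> 3)
        add  r0,r0,1      ; the single skipped insn
     .L1:                 ; target label, state 3 -> 0

   the branch is not output and the skipped insn is printed predicated on the
   inverse condition, roughly:

        add.ne  r0,r0,1
     .L1:
*/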
1954 void
1955 arc_final_prescan_insn (rtx insn,
1956 rtx *opvec ATTRIBUTE_UNUSED,
1957 int noperands ATTRIBUTE_UNUSED)
1959 /* BODY will hold the body of INSN. */
1960 register rtx body = PATTERN (insn);
1962 /* This will be 1 if trying to repeat the trick (i.e.: do the `else' part of
1963 an if/then/else), and things need to be reversed. */
1964 int reverse = 0;
1966 /* If we start with a return insn, we only succeed if we find another one. */
1967 int seeking_return = 0;
1969 /* START_INSN will hold the insn from where we start looking. This is the
1970 first insn after the following code_label if REVERSE is true. */
1971 rtx start_insn = insn;
1973 /* Update compare/branch separation marker. */
1974 record_cc_ref (insn);
1976 /* Allow -mdebug-ccfsm to turn this off so we can see how well it does.
1977 We can't do this in macro FINAL_PRESCAN_INSN because it's called from
1978 final_scan_insn which has `optimize' as a local. */
1979 if (optimize < 2 || TARGET_NO_COND_EXEC)
1980 return;
1982 /* If in state 4, check if the target branch is reached, in order to
1983 change back to state 0. */
1984 if (arc_ccfsm_state == 4)
1986 if (insn == arc_ccfsm_target_insn)
1988 arc_ccfsm_target_insn = NULL;
1989 arc_ccfsm_state = 0;
1991 return;
1994 /* If in state 3, it is possible to repeat the trick, if this insn is an
1995 unconditional branch to a label, and immediately following this branch
1996 is the previous target label which is only used once, and the label this
1997 branch jumps to is not too far off. Or in other words "we've done the
1998 `then' part, see if we can do the `else' part." */
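/* Illustrative example of that "else" trick (syntax assumed, not from the
   original source):

        bne   .Lelse      ; then-part conditionalized on eq (states 1 -> 3)
        mov   r0,1
        b     .Ldone      ; seen in state 3: an unconditional jump immediately
     .Lelse:              ;   followed by the old target label, so REVERSE is
        mov   r0,2        ;   set and the else-part gets the opposite cc
     .Ldone:

   can end up with both branches deleted and the two moves emitted as roughly
   "mov.eq r0,1" followed by "mov.ne r0,2".  */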
1999 if (arc_ccfsm_state == 3)
2001 if (simplejump_p (insn))
2003 start_insn = next_nonnote_insn (start_insn);
2004 if (GET_CODE (start_insn) == BARRIER)
2006 /* ??? Isn't this always a barrier? */
2007 start_insn = next_nonnote_insn (start_insn);
2009 if (GET_CODE (start_insn) == CODE_LABEL
2010 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2011 && LABEL_NUSES (start_insn) == 1)
2012 reverse = TRUE;
2013 else
2014 return;
2016 else if (GET_CODE (body) == RETURN)
2018 start_insn = next_nonnote_insn (start_insn);
2019 if (GET_CODE (start_insn) == BARRIER)
2020 start_insn = next_nonnote_insn (start_insn);
2021 if (GET_CODE (start_insn) == CODE_LABEL
2022 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2023 && LABEL_NUSES (start_insn) == 1)
2025 reverse = TRUE;
2026 seeking_return = 1;
2028 else
2029 return;
2031 else
2032 return;
2035 if (GET_CODE (insn) != JUMP_INSN)
2036 return;
2038 /* This jump might be paralleled with a clobber of the condition codes;
2039 the jump should always come first. */
2040 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
2041 body = XVECEXP (body, 0, 0);
2043 if (reverse
2044 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
2045 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
2047 int insns_skipped = 0, fail = FALSE, succeed = FALSE;
2048 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
2049 int then_not_else = TRUE;
2050 /* Nonzero if next insn must be the target label. */
2051 int next_must_be_target_label_p;
2052 rtx this_insn = start_insn, label = 0;
2054 /* Register the insn jumped to. */
2055 if (reverse)
2057 if (!seeking_return)
2058 label = XEXP (SET_SRC (body), 0);
2060 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
2061 label = XEXP (XEXP (SET_SRC (body), 1), 0);
2062 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
2064 label = XEXP (XEXP (SET_SRC (body), 2), 0);
2065 then_not_else = FALSE;
2067 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
2068 seeking_return = 1;
2069 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
2071 seeking_return = 1;
2072 then_not_else = FALSE;
2074 else
2075 gcc_unreachable ();
2077 /* See how many insns this branch skips, and what kind of insns. If all
2078 insns are okay, and the label or unconditional branch to the same
2079 label is not too far away, succeed. */
2080 for (insns_skipped = 0, next_must_be_target_label_p = FALSE;
2081 !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
2082 insns_skipped++)
2084 rtx scanbody;
2086 this_insn = next_nonnote_insn (this_insn);
2087 if (!this_insn)
2088 break;
2090 if (next_must_be_target_label_p)
2092 if (GET_CODE (this_insn) == BARRIER)
2093 continue;
2094 if (GET_CODE (this_insn) == CODE_LABEL
2095 && this_insn == label)
2097 arc_ccfsm_state = 1;
2098 succeed = TRUE;
2100 else
2101 fail = TRUE;
2102 break;
2105 scanbody = PATTERN (this_insn);
2107 switch (GET_CODE (this_insn))
2109 case CODE_LABEL:
2110 /* Succeed if it is the target label, otherwise fail since
2111 control falls in from somewhere else. */
2112 if (this_insn == label)
2114 arc_ccfsm_state = 1;
2115 succeed = TRUE;
2117 else
2118 fail = TRUE;
2119 break;
2121 case BARRIER:
2122 /* Succeed if the following insn is the target label.
2123 Otherwise fail.
2124 If return insns are used then the last insn in a function
2125 will be a barrier. */
2126 next_must_be_target_label_p = TRUE;
2127 break;
2129 case CALL_INSN:
2130 /* We can handle a call insn if there are no insns after it,
2131 i.e., the next "insn" is the target label. We don't have to
2132 worry about delay slots as such insns are SEQUENCE's inside
2133 INSN's. ??? It is possible to handle such insns though. */
2134 if (get_attr_cond (this_insn) == COND_CANUSE)
2135 next_must_be_target_label_p = TRUE;
2136 else
2137 fail = TRUE;
2138 break;
2140 case JUMP_INSN:
2141 /* If this is an unconditional branch to the same label, succeed.
2142 If it is to another label, do nothing. If it is conditional,
2143 fail. */
2144 /* ??? Probably, the tests for the SET and the PC are unnecessary. */
2146 if (GET_CODE (scanbody) == SET
2147 && GET_CODE (SET_DEST (scanbody)) == PC)
2149 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
2150 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
2152 arc_ccfsm_state = 2;
2153 succeed = TRUE;
2155 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
2156 fail = TRUE;
2158 else if (GET_CODE (scanbody) == RETURN
2159 && seeking_return)
2161 arc_ccfsm_state = 2;
2162 succeed = TRUE;
2164 else if (GET_CODE (scanbody) == PARALLEL)
2166 if (get_attr_cond (this_insn) != COND_CANUSE)
2167 fail = TRUE;
2169 break;
2171 case INSN:
2172 /* We can only do this with insns that can use the condition
2173 codes (and don't set them). */
2174 if (GET_CODE (scanbody) == SET
2175 || GET_CODE (scanbody) == PARALLEL)
2177 if (get_attr_cond (this_insn) != COND_CANUSE)
2178 fail = TRUE;
2180 /* We can't handle other insns like sequences. */
2181 else
2182 fail = TRUE;
2183 break;
2185 default:
2186 break;
2190 if (succeed)
2192 if ((!seeking_return) && (arc_ccfsm_state == 1 || reverse))
2193 arc_ccfsm_target_label = CODE_LABEL_NUMBER (label);
2194 else
2196 gcc_assert (seeking_return || arc_ccfsm_state == 2);
2197 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
2199 this_insn = next_nonnote_insn (this_insn);
2200 gcc_assert (!this_insn
2201 || (GET_CODE (this_insn) != BARRIER
2202 && GET_CODE (this_insn) != CODE_LABEL));
2204 if (!this_insn)
2206 /* Oh dear! We ran off the end; give up. */
2207 extract_insn_cached (insn);
2208 arc_ccfsm_state = 0;
2209 arc_ccfsm_target_insn = NULL;
2210 return;
2212 arc_ccfsm_target_insn = this_insn;
2215 /* If REVERSE is true, arc_ccfsm_current_cc needs to be inverted from
2216 what it was. */
2217 if (!reverse)
2218 arc_ccfsm_current_cc = get_arc_condition_code (XEXP (SET_SRC (body),
2219 0));
2221 if (reverse || then_not_else)
2222 arc_ccfsm_current_cc = ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc);
2225 /* Restore recog_data. Getting the attributes of other insns can
2226 destroy this array, but final.c assumes that it remains intact
2227 across this call. */
2228 extract_insn_cached (insn);
2232 /* Record that we are currently outputting label NUM with prefix PREFIX.
2233 If it's the label we're looking for, reset the ccfsm machinery.
2235 Called from (*targetm.asm_out.internal_label). */
2237 void
2238 arc_ccfsm_at_label (const char *prefix, int num)
2240 if (arc_ccfsm_state == 3 && arc_ccfsm_target_label == num
2241 && !strcmp (prefix, "L"))
2243 arc_ccfsm_state = 0;
2244 arc_ccfsm_target_insn = NULL_RTX;
2248 /* See if the current insn, which is a conditional branch, is to be
2249 deleted. */
2251 int
2252 arc_ccfsm_branch_deleted_p (void)
2254 if (arc_ccfsm_state == 1 || arc_ccfsm_state == 2)
2255 return 1;
2256 return 0;
2259 /* Record that a branch isn't output because subsequent insns can be
2260 conditionalized. */
2262 void
2263 arc_ccfsm_record_branch_deleted (void)
2265 /* Indicate we're conditionalizing insns now. */
2266 arc_ccfsm_state += 2;
2268 /* If the next insn is a subroutine call, we still need a nop between the
2269 cc setter and user. We need to undo the effect of calling record_cc_ref
2270 for the just deleted branch. */
2271 current_insn_set_cc_p = last_insn_set_cc_p;
2274 static void
2275 arc_va_start (tree valist, rtx nextarg)
2277 /* See arc_setup_incoming_varargs for reasons for this oddity. */
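/* Presumably (an assumption, not stated here): when an odd number of argument
   registers has been consumed, the anonymous-register save area is placed one
   word higher so that it stays doubleword aligned, and NEXTARG has to be
   bumped past that padding word.  */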
2278 if (crtl->args.info < 8
2279 && (crtl->args.info & 1))
2280 nextarg = plus_constant (nextarg, UNITS_PER_WORD);
2282 std_expand_builtin_va_start (valist, nextarg);
2285 /* This is how to output a definition of an internal numbered label where
2286 PREFIX is the class of label and NUM is the number within the class. */
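/* For example (illustrative; the exact label spelling depends on the target's
   label macros), PREFIX "L" with NUM 5 first lets the ccfsm notice the label
   via arc_ccfsm_at_label and then emits something like ".L5:" through
   default_internal_label.  */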
2288 static void
2289 arc_internal_label (FILE *stream, const char *prefix, unsigned long labelno)
2291 arc_ccfsm_at_label (prefix, labelno);
2292 default_internal_label (stream, prefix, labelno);
2295 /* Worker function for TARGET_ASM_EXTERNAL_LIBCALL. */
2297 static void
2298 arc_external_libcall (rtx fun ATTRIBUTE_UNUSED)
2300 #if 0
2301 /* On the ARC we want to have libgccs for multiple cpus in one binary.
2302 We can't use `assemble_name' here as that will call ASM_OUTPUT_LABELREF
2303 and we'll get another suffix added on if -mmangle-cpu. */
2304 if (TARGET_MANGLE_CPU_LIBGCC)
2306 fprintf (FILE, "\t.rename\t_%s, _%s%s\n",
2307 XSTR (SYMREF, 0), XSTR (SYMREF, 0),
2308 arc_mangle_suffix);
2310 #endif
2313 /* Worker function for TARGET_RETURN_IN_MEMORY. */
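/* Illustrative consequences of the rule below (assumed type sizes for a
   32-bit target): any struct or union result goes in memory, a "long long"
   or "double" result (8 bytes) comes back in registers, and a
   "_Complex double" (16 bytes) or variable-sized result goes in memory.  */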
2315 static bool
2316 arc_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2318 if (AGGREGATE_TYPE_P (type))
2319 return true;
2320 else
2322 HOST_WIDE_INT size = int_size_in_bytes (type);
2323 return (size == -1 || size > 8);
2327 /* For ARC, all aggregates and arguments greater than 8 bytes are
2328 passed by reference. */
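/* For example (assumed type sizes, illustration only): a "struct { char c; }"
   argument is passed by reference because it is an aggregate, an 8-byte
   "long long" is passed by value, and a 16-byte "_Complex double" is passed
   by reference because it exceeds 8 bytes.  */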
2330 static bool
2331 arc_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
2332 enum machine_mode mode, const_tree type,
2333 bool named ATTRIBUTE_UNUSED)
2335 unsigned HOST_WIDE_INT size;
2337 if (type)
2339 if (AGGREGATE_TYPE_P (type))
2340 return true;
2341 size = int_size_in_bytes (type);
2343 else
2344 size = GET_MODE_SIZE (mode);
2346 return size > 8;