1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
3 Free Software Foundation, Inc.
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
8 This file is part of GNU CC.
10 GNU CC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
any later version.
15 GNU CC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GNU CC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 /* Some output-actions in c4x.md need these. */
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 #include "conditions.h"
51 #include "target-def.h"
/* RTL handles for the libgcc routines that convert between the C4x
   floating-point modes (QF = 32-bit float, HF = 40-bit long double)
   and 64-bit integers (HI).  Presumably initialized by the backend's
   libfunc setup code, which is not visible in this extract — confirm.  */
55 rtx fix_truncqfhi2_libfunc
;
56 rtx fixuns_truncqfhi2_libfunc
;
57 rtx fix_trunchfhi2_libfunc
;
58 rtx fixuns_trunchfhi2_libfunc
;
59 rtx floathiqf2_libfunc
;
60 rtx floatunshiqf2_libfunc
;
61 rtx floathihf2_libfunc
;
62 rtx floatunshihf2_libfunc
;
/* Nonzero while compiling a function that c4x_leaf_function_p judged
   to be a leaf; consulted by c4x_isr_reg_used_p below.  */
64 static int c4x_leaf_function
;
/* Printable names for the float registers, supplied by the target header.  */
66 static const char *const float_reg_names
[] = FLOAT_REGISTER_NAMES
;
68 /* Array of the smallest class containing reg number REGNO, indexed by
69 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
70 registers are available and set the class to NO_REGS for registers
71 that the target switches say are unavailable. */
/* NOTE(review): the opening/closing braces of this initializer are
   missing from this extract (embedded line numbers jump); the entries
   themselves are left byte-identical.  */
73 enum reg_class c4x_regclass_map
[FIRST_PSEUDO_REGISTER
] =
75 /* Reg Modes Saved. */
76 R0R1_REGS
, /* R0 QI, QF, HF No. */
77 R0R1_REGS
, /* R1 QI, QF, HF No. */
78 R2R3_REGS
, /* R2 QI, QF, HF No. */
79 R2R3_REGS
, /* R3 QI, QF, HF No. */
80 EXT_LOW_REGS
, /* R4 QI, QF, HF QI. */
81 EXT_LOW_REGS
, /* R5 QI, QF, HF QI. */
82 EXT_LOW_REGS
, /* R6 QI, QF, HF QF. */
83 EXT_LOW_REGS
, /* R7 QI, QF, HF QF. */
84 ADDR_REGS
, /* AR0 QI No. */
85 ADDR_REGS
, /* AR1 QI No. */
86 ADDR_REGS
, /* AR2 QI No. */
87 ADDR_REGS
, /* AR3 QI QI. */
88 ADDR_REGS
, /* AR4 QI QI. */
89 ADDR_REGS
, /* AR5 QI QI. */
90 ADDR_REGS
, /* AR6 QI QI. */
91 ADDR_REGS
, /* AR7 QI QI. */
92 DP_REG
, /* DP QI No. */
93 INDEX_REGS
, /* IR0 QI No. */
94 INDEX_REGS
, /* IR1 QI No. */
95 BK_REG
, /* BK QI QI. */
96 SP_REG
, /* SP QI No. */
97 ST_REG
, /* ST CC No. */
98 NO_REGS
, /* DIE/IE No. */
99 NO_REGS
, /* IIE/IF No. */
100 NO_REGS
, /* IIF/IOF No. */
101 INT_REGS
, /* RS QI No. */
102 INT_REGS
, /* RE QI No. */
103 RC_REG
, /* RC QI No. */
104 EXT_REGS
, /* R8 QI, QF, HF QI. */
105 EXT_REGS
, /* R9 QI, QF, HF No. */
106 EXT_REGS
, /* R10 QI, QF, HF No. */
107 EXT_REGS
, /* R11 QI, QF, HF No. */
/* Mode to use when caller-saving each hard register (VOIDmode means the
   register is never caller-saved), indexed by REGNO in the same order
   as c4x_regclass_map.  NOTE(review): initializer braces are missing
   from this extract; entries left byte-identical.  */
110 enum machine_mode c4x_caller_save_map
[FIRST_PSEUDO_REGISTER
] =
112 /* Reg Modes Saved. */
113 HFmode
, /* R0 QI, QF, HF No. */
114 HFmode
, /* R1 QI, QF, HF No. */
115 HFmode
, /* R2 QI, QF, HF No. */
116 HFmode
, /* R3 QI, QF, HF No. */
117 QFmode
, /* R4 QI, QF, HF QI. */
118 QFmode
, /* R5 QI, QF, HF QI. */
119 QImode
, /* R6 QI, QF, HF QF. */
120 QImode
, /* R7 QI, QF, HF QF. */
121 QImode
, /* AR0 QI No. */
122 QImode
, /* AR1 QI No. */
123 QImode
, /* AR2 QI No. */
124 QImode
, /* AR3 QI QI. */
125 QImode
, /* AR4 QI QI. */
126 QImode
, /* AR5 QI QI. */
127 QImode
, /* AR6 QI QI. */
128 QImode
, /* AR7 QI QI. */
129 VOIDmode
, /* DP QI No. */
130 QImode
, /* IR0 QI No. */
131 QImode
, /* IR1 QI No. */
132 QImode
, /* BK QI QI. */
133 VOIDmode
, /* SP QI No. */
134 VOIDmode
, /* ST CC No. */
135 VOIDmode
, /* DIE/IE No. */
136 VOIDmode
, /* IIE/IF No. */
137 VOIDmode
, /* IIF/IOF No. */
138 QImode
, /* RS QI No. */
139 QImode
, /* RE QI No. */
140 VOIDmode
, /* RC QI No. */
141 QFmode
, /* R8 QI, QF, HF QI. */
142 HFmode
, /* R9 QI, QF, HF No. */
143 HFmode
, /* R10 QI, QF, HF No. */
144 HFmode
, /* R11 QI, QF, HF No. */
148 /* Test and compare insns in c4x.md store the information needed to
149 generate branch and scc insns here. */
/* Command-line option state: raw option strings and their parsed
   integer values.  Parsed in c4x_override_options below.  */
154 const char *c4x_rpts_cycles_string
;
155 int c4x_rpts_cycles
= 0; /* Max. cycles for RPTS. */
156 const char *c4x_cpu_version_string
;
157 int c4x_cpu_version
= 40; /* CPU version C30/31/32/33/40/44. */
159 /* Pragma definitions. */
/* Per-pragma identifier lists (CODE_SECTION, DATA_SECTION, FUNC_IS_PURE,
   FUNC_NEVER_RETURNS, INTERRUPT) — presumably filled in by the pragma
   handlers, which are not visible in this extract.  */
161 tree code_tree
= NULL_TREE
;
162 tree data_tree
= NULL_TREE
;
163 tree pure_tree
= NULL_TREE
;
164 tree noreturn_tree
= NULL_TREE
;
165 tree interrupt_tree
= NULL_TREE
;
167 /* Forward declarations */
/* Prototypes for the file-static helpers defined below.  PARAMS is the
   old GCC K&R-compatibility prototype macro.  */
168 static int c4x_isr_reg_used_p
PARAMS ((unsigned int));
169 static int c4x_leaf_function_p
PARAMS ((void));
170 static int c4x_assembler_function_p
PARAMS ((void));
171 static int c4x_immed_float_p
PARAMS ((rtx
));
172 static int c4x_a_register
PARAMS ((rtx
));
173 static int c4x_x_register
PARAMS ((rtx
));
174 static int c4x_immed_int_constant
PARAMS ((rtx
));
175 static int c4x_immed_float_constant
PARAMS ((rtx
));
176 static int c4x_K_constant
PARAMS ((rtx
));
177 static int c4x_N_constant
PARAMS ((rtx
));
178 static int c4x_O_constant
PARAMS ((rtx
));
179 static int c4x_R_indirect
PARAMS ((rtx
));
180 static int c4x_S_indirect
PARAMS ((rtx
));
181 static void c4x_S_address_parse
PARAMS ((rtx
, int *, int *, int *, int *));
182 static int c4x_valid_operands
PARAMS ((enum rtx_code
, rtx
*,
183 enum machine_mode
, int));
184 static int c4x_arn_reg_operand
PARAMS ((rtx
, enum machine_mode
, unsigned int));
185 static int c4x_arn_mem_operand
PARAMS ((rtx
, enum machine_mode
, unsigned int));
186 static void c4x_check_attribute
PARAMS ((const char *, tree
, tree
, tree
*));
187 static int c4x_r11_set_p
PARAMS ((rtx
));
188 static int c4x_rptb_valid_p
PARAMS ((rtx
, rtx
));
189 static int c4x_label_ref_used_p
PARAMS ((rtx
, rtx
));
190 static tree c4x_handle_fntype_attribute
PARAMS ((tree
*, tree
, tree
, int, bool *));
191 const struct attribute_spec c4x_attribute_table
[];
192 static void c4x_insert_attributes
PARAMS ((tree
, tree
*));
193 static void c4x_asm_named_section
PARAMS ((const char *, unsigned int));
194 static int c4x_adjust_cost
PARAMS ((rtx
, rtx
, rtx
, int));
195 static void c4x_encode_section_info
PARAMS ((tree
, int));
196 static void c4x_globalize_label
PARAMS ((FILE *, const char *));
198 /* Initialize the GCC target structure. */
/* Each #undef/#define pair overrides one hook in TARGET_INITIALIZER so
   that the targetm definition below picks up the C4x-specific handlers.  */
199 #undef TARGET_ASM_BYTE_OP
200 #define TARGET_ASM_BYTE_OP "\t.word\t"
201 #undef TARGET_ASM_ALIGNED_HI_OP
202 #define TARGET_ASM_ALIGNED_HI_OP NULL
203 #undef TARGET_ASM_ALIGNED_SI_OP
204 #define TARGET_ASM_ALIGNED_SI_OP NULL
206 #undef TARGET_ATTRIBUTE_TABLE
207 #define TARGET_ATTRIBUTE_TABLE c4x_attribute_table
209 #undef TARGET_INSERT_ATTRIBUTES
210 #define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes
212 #undef TARGET_INIT_BUILTINS
213 #define TARGET_INIT_BUILTINS c4x_init_builtins
215 #undef TARGET_EXPAND_BUILTIN
216 #define TARGET_EXPAND_BUILTIN c4x_expand_builtin
218 #undef TARGET_SCHED_ADJUST_COST
219 #define TARGET_SCHED_ADJUST_COST c4x_adjust_cost
221 #undef TARGET_ENCODE_SECTION_INFO
222 #define TARGET_ENCODE_SECTION_INFO c4x_encode_section_info
224 #undef TARGET_ASM_GLOBALIZE_LABEL
225 #define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label
/* The single target-hook vector instance for this backend.  */
227 struct gcc_target targetm
= TARGET_INITIALIZER
;
229 /* Override command line options.
230 Called once after all options have been parsed.
231 Mostly we process the processor
232 type and sometimes adjust other TARGET_ options. */
/* NOTE(review): this extract is missing many original lines of this
   function (braces, the TARGET_C3x condition chain that selects the
   default CPU version, and parts of the switch); code left byte-identical.
   Visible logic: parse -mrpts-cycles, derive c4x_cpu_version from the
   -m30/-m31/... flags, let -mcpu=NN override those flags, then set
   C3X_FLAG for the C3x family members.  */
235 c4x_override_options ()
237 if (c4x_rpts_cycles_string
)
238 c4x_rpts_cycles
= atoi (c4x_rpts_cycles_string
);
243 c4x_cpu_version
= 30;
245 c4x_cpu_version
= 31;
247 c4x_cpu_version
= 32;
249 c4x_cpu_version
= 33;
251 c4x_cpu_version
= 40;
253 c4x_cpu_version
= 44;
255 c4x_cpu_version
= 40;
257 /* -mcpu=xx overrides -m40 etc. */
258 if (c4x_cpu_version_string
)
260 const char *p
= c4x_cpu_version_string
;
262 /* Also allow -mcpu=c30 etc. */
263 if (*p
== 'c' || *p
== 'C')
265 c4x_cpu_version
= atoi (p
);
/* Clear all CPU-family flags, then set exactly one from the version.  */
268 target_flags
&= ~(C30_FLAG
| C31_FLAG
| C32_FLAG
| C33_FLAG
|
269 C40_FLAG
| C44_FLAG
);
271 switch (c4x_cpu_version
)
273 case 30: target_flags
|= C30_FLAG
; break;
274 case 31: target_flags
|= C31_FLAG
; break;
275 case 32: target_flags
|= C32_FLAG
; break;
276 case 33: target_flags
|= C33_FLAG
; break;
277 case 40: target_flags
|= C40_FLAG
; break;
278 case 44: target_flags
|= C44_FLAG
; break;
/* default case: unknown -mcpu value, fall back to the C40.  */
280 warning ("unknown CPU version %d, using 40.\n", c4x_cpu_version
);
281 c4x_cpu_version
= 40;
282 target_flags
|= C40_FLAG
;
285 if (TARGET_C30
|| TARGET_C31
|| TARGET_C32
|| TARGET_C33
)
286 target_flags
|= C3X_FLAG
;
288 target_flags
&= ~C3X_FLAG
;
290 /* Convert foo / 8.0 into foo * 0.125, etc. */
291 set_fast_math_flags (1);
293 /* We should phase out the following at some stage.
294 This provides compatibility with the old -mno-aliases option. */
295 if (! TARGET_ALIASES
&& ! flag_argument_noalias
)
296 flag_argument_noalias
= 1;
300 /* This is called before c4x_override_options. */
/* Adjust default optimization flags; LEVEL and SIZE are ignored because
   the scheduling tweak below applies at every -O level.  */
303 c4x_optimization_options (level
, size
)
304 int level ATTRIBUTE_UNUSED
;
305 int size ATTRIBUTE_UNUSED
;
307 /* Scheduling before register allocation can screw up global
308 register allocation, especially for functions that use MPY||ADD
309 instructions. The benefit we gain we get by scheduling before
310 register allocation is probably marginal anyhow. */
311 flag_schedule_insns
= 0;
315 /* Write an ASCII string. */
317 #define C4X_ASCII_LIMIT 40
/* Emit LEN bytes at PTR to STREAM as assembler .byte directives,
   batching printable characters into quoted strings of at most
   C4X_ASCII_LIMIT characters and emitting other bytes numerically.
   NOTE(review): large parts of this function's body are missing from
   this extract (buffer-fill and flush logic); code left byte-identical.  */
320 c4x_output_ascii (stream
, ptr
, len
)
325 char sbuf
[C4X_ASCII_LIMIT
+ 1];
326 int s
, l
, special
, first
= 1, onlys
;
329 fprintf (stream
, "\t.byte\t");
331 for (s
= l
= 0; len
> 0; --len
, ++ptr
)
335 /* Escape " and \ with a \". */
336 special
= *ptr
== '\"' || *ptr
== '\\';
338 /* If printable - add to buff. */
339 if ((! TARGET_TI
|| ! special
) && *ptr
>= 0x20 && *ptr
< 0x7f)
344 if (s
< C4X_ASCII_LIMIT
- 1)
359 fprintf (stream
, "\"%s\"", sbuf
);
/* TI assembler: keep output lines under ~80 columns.  */
361 if (TARGET_TI
&& l
>= 80 && len
> 1)
363 fprintf (stream
, "\n\t.byte\t");
381 fprintf (stream
, "%d", *ptr
);
383 if (TARGET_TI
&& l
>= 80 && len
> 1)
385 fprintf (stream
, "\n\t.byte\t");
396 fprintf (stream
, "\"%s\"", sbuf
);
399 fputc ('\n', stream
);
/* Return nonzero if it is permissible to store a value of mode MODE in
   hard register REGNO (target hook HARD_REGNO_MODE_OK).  */
404 c4x_hard_regno_mode_ok (regno
, mode
)
406 enum machine_mode mode
;
411 case Pmode
: /* Pointer (24/32 bits). */
413 case QImode
: /* Integer (32 bits). */
414 return IS_INT_REGNO (regno
);
416 case QFmode
: /* Float, Double (32 bits). */
417 case HFmode
: /* Long Double (40 bits). */
418 return IS_EXT_REGNO (regno
);
420 case CCmode
: /* Condition Codes. */
421 case CC_NOOVmode
: /* Condition Codes. */
422 return IS_ST_REGNO (regno
);
424 case HImode
: /* Long Long (64 bits). */
425 /* We need two registers to store long longs. Note that
426 it is much easier to constrain the first register
427 to start on an even boundary. */
428 return IS_INT_REGNO (regno
)
429 && IS_INT_REGNO (regno
+ 1)
433 return 0; /* We don't support these modes. */
439 /* Return nonzero if REGNO1 can be renamed to REGNO2. */
441 c4x_hard_regno_rename_ok (regno1
, regno2
)
445 /* We can not copy call saved registers from mode QI into QF or from
447 if (IS_FLOAT_CALL_SAVED_REGNO (regno1
) && IS_INT_CALL_SAVED_REGNO (regno2
))
449 if (IS_INT_CALL_SAVED_REGNO (regno1
) && IS_FLOAT_CALL_SAVED_REGNO (regno2
))
451 /* We cannot copy from an extended (40 bit) register to a standard
452 (32 bit) register because we only set the condition codes for
453 extended registers. */
454 if (IS_EXT_REGNO (regno1
) && ! IS_EXT_REGNO (regno2
))
456 if (IS_EXT_REGNO (regno2
) && ! IS_EXT_REGNO (regno1
))
461 /* The TI C3x C compiler register argument runtime model uses 6 registers,
462 AR2, R2, R3, RC, RS, RE.
464 The first two floating point arguments (float, double, long double)
465 that are found scanning from left to right are assigned to R2 and R3.
467 The remaining integer (char, short, int, long) or pointer arguments
468 are assigned to the remaining registers in the order AR2, R2, R3,
469 RC, RS, RE when scanning left to right, except for the last named
470 argument prior to an ellipsis denoting variable number of
471 arguments. We don't have to worry about the latter condition since
472 function.c treats the last named argument as anonymous (unnamed).
474 All arguments that cannot be passed in registers are pushed onto
475 the stack in reverse order (right to left). GCC handles that for us.
477 c4x_init_cumulative_args() is called at the start, so we can parse
478 the args to see how many floating point arguments and how many
479 integer (or pointer) arguments there are. c4x_function_arg() is
480 then called (sometimes repeatedly) for each argument (parsed left
481 to right) to obtain the register to pass the argument in, or zero
482 if the argument is to be passed on the stack. Once the compiler is
483 happy, c4x_function_arg_advance() is called.
485 Don't use R0 to pass arguments in, we use 0 to indicate a stack
/* Integer-argument register sequences, indexed by the number of
   floating-point registers already consumed (0, 1, or 2).  */
488 static const int c4x_int_reglist
[3][6] =
490 {AR2_REGNO
, R2_REGNO
, R3_REGNO
, RC_REGNO
, RS_REGNO
, RE_REGNO
},
491 {AR2_REGNO
, R3_REGNO
, RC_REGNO
, RS_REGNO
, RE_REGNO
, 0},
492 {AR2_REGNO
, RC_REGNO
, RS_REGNO
, RE_REGNO
, 0, 0}
/* The two float-argument registers, in assignment order.  */
495 static const int c4x_fp_reglist
[2] = {R2_REGNO
, R3_REGNO
};
498 /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
499 function whose data type is FNTYPE.
500 For a library call, FNTYPE is 0. */
/* NOTE(review): interior lines (braces, debug-flag guards, and the
   counting statements for cum->floats / cum->ints) are missing from
   this extract; code left byte-identical.  Visible logic: reset the
   counters, optionally print debug info, record whether a prototype
   exists, then walk the parameter type list classifying each arg as
   float-like (QFmode/HFmode) or int/pointer-like (QImode/Pmode).  */
503 c4x_init_cumulative_args (cum
, fntype
, libname
)
504 CUMULATIVE_ARGS
*cum
; /* Argument info to initialize. */
505 tree fntype
; /* Tree ptr for function decl. */
506 rtx libname
; /* SYMBOL_REF of library name or 0. */
508 tree param
, next_param
;
510 cum
->floats
= cum
->ints
= 0;
517 fprintf (stderr
, "\nc4x_init_cumulative_args (");
520 tree ret_type
= TREE_TYPE (fntype
);
522 fprintf (stderr
, "fntype code = %s, ret code = %s",
523 tree_code_name
[(int) TREE_CODE (fntype
)],
524 tree_code_name
[(int) TREE_CODE (ret_type
)]);
527 fprintf (stderr
, "no fntype");
530 fprintf (stderr
, ", libname = %s", XSTR (libname
, 0));
533 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
535 for (param
= fntype
? TYPE_ARG_TYPES (fntype
) : 0;
536 param
; param
= next_param
)
540 next_param
= TREE_CHAIN (param
);
542 type
= TREE_VALUE (param
);
543 if (type
&& type
!= void_type_node
)
545 enum machine_mode mode
;
547 /* If the last arg doesn't have void type then we have
548 variable arguments. */
552 if ((mode
= TYPE_MODE (type
)))
554 if (! MUST_PASS_IN_STACK (mode
, type
))
556 /* Look for float, double, or long double argument. */
557 if (mode
== QFmode
|| mode
== HFmode
)
559 /* Look for integer, enumeral, boolean, char, or pointer
561 else if (mode
== QImode
|| mode
== Pmode
)
570 fprintf (stderr
, "%s%s, args = %d)\n",
571 cum
->prototype
? ", prototype" : "",
572 cum
->var
? ", variable args" : "",
577 /* Update the data in CUM to advance over an argument
578 of mode MODE and data type TYPE.
579 (TYPE is null for libcalls where that information may not be available.) */
/* NOTE(review): the increment statements for cum->floats / cum->ints
   are missing from this extract (embedded line numbers jump); only the
   classification conditions are visible.  Code left byte-identical.  */
582 c4x_function_arg_advance (cum
, mode
, type
, named
)
583 CUMULATIVE_ARGS
*cum
; /* Current arg information. */
584 enum machine_mode mode
; /* Current arg mode. */
585 tree type
; /* Type of the arg or 0 if lib support. */
586 int named
; /* Whether or not the argument was named. */
589 fprintf (stderr
, "c4x_function_adv(mode=%s, named=%d)\n\n",
590 GET_MODE_NAME (mode
), named
);
594 && ! MUST_PASS_IN_STACK (mode
, type
))
596 /* Look for float, double, or long double argument. */
597 if (mode
== QFmode
|| mode
== HFmode
)
599 /* Look for integer, enumeral, boolean, char, or pointer argument. */
600 else if (mode
== QImode
|| mode
== Pmode
)
603 else if (! TARGET_MEMPARM
&& ! type
)
605 /* Handle libcall arguments. */
606 if (mode
== QFmode
|| mode
== HFmode
)
608 else if (mode
== QImode
|| mode
== Pmode
)
615 /* Define where to put the arguments to a function. Value is zero to
616 push the argument on the stack, or a hard register in which to
619 MODE is the argument's machine mode.
620 TYPE is the data type of the argument (as a tree).
621 This is null for libcalls where that information may
623 CUM is a variable of type CUMULATIVE_ARGS which gives info about
624 the preceding args and about the function being called.
625 NAMED is nonzero if this argument is a named parameter
626 (otherwise it is an extra parameter matching an ellipsis). */
/* NOTE(review): several interior lines (braces, guard conditions,
   the final stack-return path) are missing from this extract; code
   left byte-identical.  */
629 c4x_function_arg (cum
, mode
, type
, named
)
630 CUMULATIVE_ARGS
*cum
; /* Current arg information. */
631 enum machine_mode mode
; /* Current arg mode. */
632 tree type
; /* Type of the arg or 0 if lib support. */
633 int named
; /* != 0 for normal args, == 0 for ... args. */
635 int reg
= 0; /* Default to passing argument on stack. */
639 /* We can handle at most 2 floats in R2, R3. */
640 cum
->maxfloats
= (cum
->floats
> 2) ? 2 : cum
->floats
;
642 /* We can handle at most 6 integers minus number of floats passed
644 cum
->maxints
= (cum
->ints
> 6 - cum
->maxfloats
) ?
645 6 - cum
->maxfloats
: cum
->ints
;
647 /* If there is no prototype, assume all the arguments are integers. */
648 if (! cum
->prototype
)
651 cum
->ints
= cum
->floats
= 0;
655 /* This marks the last argument. We don't need to pass this through
657 if (type
== void_type_node
)
663 && ! MUST_PASS_IN_STACK (mode
, type
))
665 /* Look for float, double, or long double argument. */
666 if (mode
== QFmode
|| mode
== HFmode
)
668 if (cum
->floats
< cum
->maxfloats
)
669 reg
= c4x_fp_reglist
[cum
->floats
];
671 /* Look for integer, enumeral, boolean, char, or pointer argument. */
672 else if (mode
== QImode
|| mode
== Pmode
)
674 if (cum
->ints
< cum
->maxints
)
675 reg
= c4x_int_reglist
[cum
->maxfloats
][cum
->ints
];
678 else if (! TARGET_MEMPARM
&& ! type
)
680 /* We could use a different argument calling model for libcalls,
681 since we're only calling functions in libgcc. Thus we could
682 pass arguments for long longs in registers rather than on the
683 stack. In the meantime, use the odd TI format. We make the
684 assumption that we won't have more than two floating point
685 args, six integer args, and that all the arguments are of the
687 if (mode
== QFmode
|| mode
== HFmode
)
688 reg
= c4x_fp_reglist
[cum
->floats
];
689 else if (mode
== QImode
|| mode
== Pmode
)
690 reg
= c4x_int_reglist
[0][cum
->ints
];
695 fprintf (stderr
, "c4x_function_arg(mode=%s, named=%d",
696 GET_MODE_NAME (mode
), named
);
698 fprintf (stderr
, ", reg=%s", reg_names
[reg
]);
700 fprintf (stderr
, ", stack");
701 fprintf (stderr
, ")\n");
704 return gen_rtx_REG (mode
, reg
);
709 /* C[34]x arguments grow in weird ways (downwards) that the standard
710 varargs stuff can't handle.. */
/* Expand va_arg: pre-decrement VALIST by the argument size and return
   the resulting address as an rtx.  */
712 c4x_va_arg (valist
, type
)
717 t
= build (PREDECREMENT_EXPR
, TREE_TYPE (valist
), valist
,
718 build_int_2 (int_size_in_bytes (type
), 0));
719 TREE_SIDE_EFFECTS (t
) = 1;
721 return expand_expr (t
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
/* Return nonzero if hard register REGNO must be saved/restored by an
   interrupt service routine's prologue/epilogue.  */
726 c4x_isr_reg_used_p (regno
)
729 /* Don't save/restore FP or ST, we handle them separately. */
730 if (regno
== FRAME_POINTER_REGNUM
731 || IS_ST_REGNO (regno
))
734 /* We could be a little smarter abut saving/restoring DP.
735 We'll only save if for the big memory model or if
736 we're paranoid. ;-) */
737 if (IS_DP_REGNO (regno
))
738 return ! TARGET_SMALL
|| TARGET_PARANOID
;
740 /* Only save/restore regs in leaf function that are used. */
741 if (c4x_leaf_function
)
742 return regs_ever_live
[regno
] && fixed_regs
[regno
] == 0;
744 /* Only save/restore regs that are used by the ISR and regs
745 that are likely to be used by functions the ISR calls
746 if they are not fixed. */
747 return IS_EXT_REGNO (regno
)
748 || ((regs_ever_live
[regno
] || call_used_regs
[regno
])
749 && fixed_regs
[regno
] == 0);
/* Return nonzero if the current function is (or pretends to be) a leaf.  */
754 c4x_leaf_function_p ()
756 /* A leaf function makes no calls, so we only need
757 to save/restore the registers we actually use.
758 For the global variable leaf_function to be set, we need
759 to define LEAF_REGISTERS and all that it entails.
760 Let's check ourselves... */
762 if (lookup_attribute ("leaf_pretend",
763 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
))))
766 /* Use the leaf_pretend attribute at your own risk. This is a hack
767 to speed up ISRs that call a function infrequently where the
768 overhead of saving and restoring the additional registers is not
769 warranted. You must save and restore the additional registers
770 required by the called function. Caveat emptor. Here's enough
773 if (leaf_function_p ())
/* Return nonzero if the current function carries the "assembler" or
   "naked" attribute (no prologue/epilogue should be generated).  */
781 c4x_assembler_function_p ()
785 type
= TREE_TYPE (current_function_decl
);
786 return (lookup_attribute ("assembler", TYPE_ATTRIBUTES (type
)) != NULL
)
787 || (lookup_attribute ("naked", TYPE_ATTRIBUTES (type
)) != NULL
);
/* Return nonzero if the current function is an interrupt handler,
   either via the "interrupt" attribute or the TI c_intNN naming
   convention.  */
792 c4x_interrupt_function_p ()
794 if (lookup_attribute ("interrupt",
795 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
))))
798 /* Look for TI style c_intnn. */
799 return current_function_name
[0] == 'c'
800 && current_function_name
[1] == '_'
801 && current_function_name
[2] == 'i'
802 && current_function_name
[3] == 'n'
803 && current_function_name
[4] == 't'
804 && ISDIGIT (current_function_name
[5])
805 && ISDIGIT (current_function_name
[6]);
/* Emit the RTL for the current function's prologue.  Handles three
   cases visible below: "assembler"/"naked" functions (no prologue),
   interrupt functions (save ST/AR3/DP and every ISR-used register,
   both QI and QF views of extended registers), and normal functions
   (push old FP, set new FP, grow the stack, save call-saved regs).
   NOTE(review): many interior lines (braces, conditions, return
   statements, GEN_INT size operands) are missing from this extract;
   code left byte-identical.  */
809 c4x_expand_prologue ()
812 int size
= get_frame_size ();
815 /* In functions where ar3 is not used but frame pointers are still
816 specified, frame pointers are not adjusted (if >= -O2) and this
817 is used so it won't needlessly push the frame pointer. */
820 /* For __assembler__ function don't build a prologue. */
821 if (c4x_assembler_function_p ())
826 /* For __interrupt__ function build specific prologue. */
827 if (c4x_interrupt_function_p ())
829 c4x_leaf_function
= c4x_leaf_function_p ();
831 insn
= emit_insn (gen_push_st ());
832 RTX_FRAME_RELATED_P (insn
) = 1;
835 insn
= emit_insn (gen_pushqi ( gen_rtx_REG (QImode
, AR3_REGNO
)));
836 RTX_FRAME_RELATED_P (insn
) = 1;
837 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, AR3_REGNO
),
838 gen_rtx_REG (QImode
, SP_REGNO
)));
839 RTX_FRAME_RELATED_P (insn
) = 1;
840 /* We require that an ISR uses fewer than 32768 words of
841 local variables, otherwise we have to go to lots of
842 effort to save a register, load it with the desired size,
843 adjust the stack pointer, and then restore the modified
844 register. Frankly, I think it is a poor ISR that
845 requires more than 32767 words of local temporary
848 error ("ISR %s requires %d words of local vars, max is 32767",
849 current_function_name
, size
);
851 insn
= emit_insn (gen_addqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
852 gen_rtx_REG (QImode
, SP_REGNO
),
854 RTX_FRAME_RELATED_P (insn
) = 1;
856 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
858 if (c4x_isr_reg_used_p (regno
))
860 if (regno
== DP_REGNO
)
862 insn
= emit_insn (gen_push_dp ());
863 RTX_FRAME_RELATED_P (insn
) = 1;
867 insn
= emit_insn (gen_pushqi (gen_rtx_REG (QImode
, regno
)));
868 RTX_FRAME_RELATED_P (insn
) = 1;
869 if (IS_EXT_REGNO (regno
))
871 insn
= emit_insn (gen_pushqf
872 (gen_rtx_REG (QFmode
, regno
)));
873 RTX_FRAME_RELATED_P (insn
) = 1;
878 /* We need to clear the repeat mode flag if the ISR is
879 going to use a RPTB instruction or uses the RC, RS, or RE
881 if (regs_ever_live
[RC_REGNO
]
882 || regs_ever_live
[RS_REGNO
]
883 || regs_ever_live
[RE_REGNO
])
885 insn
= emit_insn (gen_andn_st (GEN_INT(~0x100)));
886 RTX_FRAME_RELATED_P (insn
) = 1;
889 /* Reload DP reg if we are paranoid about some turkey
890 violating small memory model rules. */
891 if (TARGET_SMALL
&& TARGET_PARANOID
)
893 insn
= emit_insn (gen_set_ldp_prologue
894 (gen_rtx_REG (QImode
, DP_REGNO
),
895 gen_rtx_SYMBOL_REF (QImode
, "data_sec")));
896 RTX_FRAME_RELATED_P (insn
) = 1;
901 if (frame_pointer_needed
)
904 || (current_function_args_size
!= 0)
907 insn
= emit_insn (gen_pushqi ( gen_rtx_REG (QImode
, AR3_REGNO
)));
908 RTX_FRAME_RELATED_P (insn
) = 1;
909 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, AR3_REGNO
),
910 gen_rtx_REG (QImode
, SP_REGNO
)));
911 RTX_FRAME_RELATED_P (insn
) = 1;
916 /* Since ar3 is not used, we don't need to push it. */
922 /* If we use ar3, we need to push it. */
924 if ((size
!= 0) || (current_function_args_size
!= 0))
926 /* If we are omitting the frame pointer, we still have
927 to make space for it so the offsets are correct
928 unless we don't use anything on the stack at all. */
935 /* Local vars are too big, it will take multiple operations
939 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R1_REGNO
),
940 GEN_INT(size
>> 16)));
941 RTX_FRAME_RELATED_P (insn
) = 1;
942 insn
= emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode
, R1_REGNO
),
943 gen_rtx_REG (QImode
, R1_REGNO
),
945 RTX_FRAME_RELATED_P (insn
) = 1;
949 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R1_REGNO
),
950 GEN_INT(size
& ~0xffff)));
951 RTX_FRAME_RELATED_P (insn
) = 1;
953 insn
= emit_insn (gen_iorqi3 (gen_rtx_REG (QImode
, R1_REGNO
),
954 gen_rtx_REG (QImode
, R1_REGNO
),
955 GEN_INT(size
& 0xffff)));
956 RTX_FRAME_RELATED_P (insn
) = 1;
957 insn
= emit_insn (gen_addqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
958 gen_rtx_REG (QImode
, SP_REGNO
),
959 gen_rtx_REG (QImode
, R1_REGNO
)));
960 RTX_FRAME_RELATED_P (insn
) = 1;
964 /* Local vars take up less than 32767 words, so we can directly
966 insn
= emit_insn (gen_addqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
967 gen_rtx_REG (QImode
, SP_REGNO
),
969 RTX_FRAME_RELATED_P (insn
) = 1;
972 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
974 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
])
976 if (IS_FLOAT_CALL_SAVED_REGNO (regno
))
978 if (TARGET_PRESERVE_FLOAT
)
980 insn
= emit_insn (gen_pushqi
981 (gen_rtx_REG (QImode
, regno
)));
982 RTX_FRAME_RELATED_P (insn
) = 1;
984 insn
= emit_insn (gen_pushqf (gen_rtx_REG (QFmode
, regno
)));
985 RTX_FRAME_RELATED_P (insn
) = 1;
987 else if ((! dont_push_ar3
) || (regno
!= AR3_REGNO
))
989 insn
= emit_insn (gen_pushqi ( gen_rtx_REG (QImode
, regno
)));
990 RTX_FRAME_RELATED_P (insn
) = 1;
/* Emit the RTL for the current function's epilogue — the mirror image
   of c4x_expand_prologue: assembler/naked functions just return;
   interrupt functions pop their saved registers in reverse order and
   return via the interrupt return pattern; normal functions restore
   call-saved registers, shrink the stack, restore the old FP, and
   return.  NOTE(review): many interior lines (braces, conditions,
   GEN_INT size operands, the R2 return-address load) are missing from
   this extract; code left byte-identical.  */
999 c4x_expand_epilogue()
1005 int size
= get_frame_size ();
1007 /* For __assembler__ function build no epilogue. */
1008 if (c4x_assembler_function_p ())
1010 insn
= emit_jump_insn (gen_return_from_epilogue ());
1011 RTX_FRAME_RELATED_P (insn
) = 1;
1015 /* For __interrupt__ function build specific epilogue. */
1016 if (c4x_interrupt_function_p ())
1018 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; --regno
)
1020 if (! c4x_isr_reg_used_p (regno
))
1022 if (regno
== DP_REGNO
)
1024 insn
= emit_insn (gen_pop_dp ());
1025 RTX_FRAME_RELATED_P (insn
) = 1;
1029 /* We have to use unspec because the compiler will delete insns
1030 that are not call-saved. */
1031 if (IS_EXT_REGNO (regno
))
1033 insn
= emit_insn (gen_popqf_unspec
1034 (gen_rtx_REG (QFmode
, regno
)));
1035 RTX_FRAME_RELATED_P (insn
) = 1;
1037 insn
= emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode
, regno
)));
1038 RTX_FRAME_RELATED_P (insn
) = 1;
1043 insn
= emit_insn (gen_subqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
1044 gen_rtx_REG (QImode
, SP_REGNO
),
1046 RTX_FRAME_RELATED_P (insn
) = 1;
1047 insn
= emit_insn (gen_popqi
1048 (gen_rtx_REG (QImode
, AR3_REGNO
)));
1049 RTX_FRAME_RELATED_P (insn
) = 1;
1051 insn
= emit_insn (gen_pop_st ());
1052 RTX_FRAME_RELATED_P (insn
) = 1;
1053 insn
= emit_jump_insn (gen_return_from_interrupt_epilogue ());
1054 RTX_FRAME_RELATED_P (insn
) = 1;
1058 if (frame_pointer_needed
)
1061 || (current_function_args_size
!= 0)
1065 (gen_movqi (gen_rtx_REG (QImode
, R2_REGNO
),
1066 gen_rtx_MEM (QImode
,
1068 (QImode
, gen_rtx_REG (QImode
,
1071 RTX_FRAME_RELATED_P (insn
) = 1;
1073 /* We already have the return value and the fp,
1074 so we need to add those to the stack. */
1081 /* Since ar3 is not used for anything, we don't need to
1088 dont_pop_ar3
= 0; /* If we use ar3, we need to pop it. */
1089 if (size
|| current_function_args_size
)
1091 /* If we are ommitting the frame pointer, we still have
1092 to make space for it so the offsets are correct
1093 unless we don't use anything on the stack at all. */
1098 /* Now restore the saved registers, putting in the delayed branch
1100 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
1102 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1104 if (regno
== AR3_REGNO
&& dont_pop_ar3
)
1107 if (IS_FLOAT_CALL_SAVED_REGNO (regno
))
1109 insn
= emit_insn (gen_popqf_unspec
1110 (gen_rtx_REG (QFmode
, regno
)));
1111 RTX_FRAME_RELATED_P (insn
) = 1;
1112 if (TARGET_PRESERVE_FLOAT
)
1114 insn
= emit_insn (gen_popqi_unspec
1115 (gen_rtx_REG (QImode
, regno
)));
1116 RTX_FRAME_RELATED_P (insn
) = 1;
1121 insn
= emit_insn (gen_popqi (gen_rtx_REG (QImode
, regno
)));
1122 RTX_FRAME_RELATED_P (insn
) = 1;
1127 if (frame_pointer_needed
)
1130 || (current_function_args_size
!= 0)
1133 /* Restore the old FP. */
1136 (gen_rtx_REG (QImode
, AR3_REGNO
),
1137 gen_rtx_MEM (QImode
, gen_rtx_REG (QImode
, AR3_REGNO
))));
1139 RTX_FRAME_RELATED_P (insn
) = 1;
1145 /* Local vars are too big, it will take multiple operations
1149 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R3_REGNO
),
1150 GEN_INT(size
>> 16)));
1151 RTX_FRAME_RELATED_P (insn
) = 1;
1152 insn
= emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode
, R3_REGNO
),
1153 gen_rtx_REG (QImode
, R3_REGNO
),
1155 RTX_FRAME_RELATED_P (insn
) = 1;
1159 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R3_REGNO
),
1160 GEN_INT(size
& ~0xffff)));
1161 RTX_FRAME_RELATED_P (insn
) = 1;
1163 insn
= emit_insn (gen_iorqi3 (gen_rtx_REG (QImode
, R3_REGNO
),
1164 gen_rtx_REG (QImode
, R3_REGNO
),
1165 GEN_INT(size
& 0xffff)));
1166 RTX_FRAME_RELATED_P (insn
) = 1;
1167 insn
= emit_insn (gen_subqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
1168 gen_rtx_REG (QImode
, SP_REGNO
),
1169 gen_rtx_REG (QImode
, R3_REGNO
)));
1170 RTX_FRAME_RELATED_P (insn
) = 1;
1174 /* Local vars take up less than 32768 words, so we can directly
1175 subtract the number. */
1176 insn
= emit_insn (gen_subqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
1177 gen_rtx_REG (QImode
, SP_REGNO
),
1179 RTX_FRAME_RELATED_P (insn
) = 1;
1184 insn
= emit_jump_insn (gen_return_indirect_internal
1185 (gen_rtx_REG (QImode
, R2_REGNO
)));
1186 RTX_FRAME_RELATED_P (insn
) = 1;
1190 insn
= emit_jump_insn (gen_return_from_epilogue ());
1191 RTX_FRAME_RELATED_P (insn
) = 1;
/* Return nonzero if the current function needs no epilogue at all:
   reload has completed, it is not an assembler or interrupt function,
   uses no alloca, takes no stack arguments, has no frame, and saves
   no call-saved register other than AR3.  */
1198 c4x_null_epilogue_p ()
1202 if (reload_completed
1203 && ! c4x_assembler_function_p ()
1204 && ! c4x_interrupt_function_p ()
1205 && ! current_function_calls_alloca
1206 && ! current_function_args_size
1208 && ! get_frame_size ())
1210 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
1211 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
]
1212 && (regno
!= AR3_REGNO
))
1221 c4x_emit_move_sequence (operands
, mode
)
1223 enum machine_mode mode
;
1225 rtx op0
= operands
[0];
1226 rtx op1
= operands
[1];
1228 if (! reload_in_progress
1231 && ! (stik_const_operand (op1
, mode
) && ! push_operand (op0
, mode
)))
1232 op1
= force_reg (mode
, op1
);
1234 if (GET_CODE (op1
) == LO_SUM
1235 && GET_MODE (op1
) == Pmode
1236 && dp_reg_operand (XEXP (op1
, 0), mode
))
1238 /* expand_increment will sometimes create a LO_SUM immediate
1240 op1
= XEXP (op1
, 1);
1242 else if (symbolic_address_operand (op1
, mode
))
1244 if (TARGET_LOAD_ADDRESS
)
1246 /* Alias analysis seems to do a better job if we force
1247 constant addresses to memory after reload. */
1248 emit_insn (gen_load_immed_address (op0
, op1
));
1253 /* Stick symbol or label address into the constant pool. */
1254 op1
= force_const_mem (Pmode
, op1
);
1257 else if (mode
== HFmode
&& CONSTANT_P (op1
) && ! LEGITIMATE_CONSTANT_P (op1
))
1259 /* We could be a lot smarter about loading some of these
1261 op1
= force_const_mem (mode
, op1
);
1264 /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1265 and emit associated (HIGH (SYMREF)) if large memory model.
1266 c4x_legitimize_address could be used to do this,
1267 perhaps by calling validize_address. */
1268 if (TARGET_EXPOSE_LDP
1269 && ! (reload_in_progress
|| reload_completed
)
1270 && GET_CODE (op1
) == MEM
1271 && symbolic_address_operand (XEXP (op1
, 0), Pmode
))
1273 rtx dp_reg
= gen_rtx_REG (Pmode
, DP_REGNO
);
1275 emit_insn (gen_set_ldp (dp_reg
, XEXP (op1
, 0)));
1276 op1
= change_address (op1
, mode
,
1277 gen_rtx_LO_SUM (Pmode
, dp_reg
, XEXP (op1
, 0)));
1280 if (TARGET_EXPOSE_LDP
1281 && ! (reload_in_progress
|| reload_completed
)
1282 && GET_CODE (op0
) == MEM
1283 && symbolic_address_operand (XEXP (op0
, 0), Pmode
))
1285 rtx dp_reg
= gen_rtx_REG (Pmode
, DP_REGNO
);
1287 emit_insn (gen_set_ldp (dp_reg
, XEXP (op0
, 0)));
1288 op0
= change_address (op0
, mode
,
1289 gen_rtx_LO_SUM (Pmode
, dp_reg
, XEXP (op0
, 0)));
1292 if (GET_CODE (op0
) == SUBREG
1293 && mixed_subreg_operand (op0
, mode
))
1295 /* We should only generate these mixed mode patterns
1296 during RTL generation. If we need do it later on
1297 then we'll have to emit patterns that won't clobber CC. */
1298 if (reload_in_progress
|| reload_completed
)
1300 if (GET_MODE (SUBREG_REG (op0
)) == QImode
)
1301 op0
= SUBREG_REG (op0
);
1302 else if (GET_MODE (SUBREG_REG (op0
)) == HImode
)
1304 op0
= copy_rtx (op0
);
1305 PUT_MODE (op0
, QImode
);
1311 emit_insn (gen_storeqf_int_clobber (op0
, op1
));
1317 if (GET_CODE (op1
) == SUBREG
1318 && mixed_subreg_operand (op1
, mode
))
1320 /* We should only generate these mixed mode patterns
1321 during RTL generation. If we need do it later on
1322 then we'll have to emit patterns that won't clobber CC. */
1323 if (reload_in_progress
|| reload_completed
)
1325 if (GET_MODE (SUBREG_REG (op1
)) == QImode
)
1326 op1
= SUBREG_REG (op1
);
1327 else if (GET_MODE (SUBREG_REG (op1
)) == HImode
)
1329 op1
= copy_rtx (op1
);
1330 PUT_MODE (op1
, QImode
);
1336 emit_insn (gen_loadqf_int_clobber (op0
, op1
));
1343 && reg_operand (op0
, mode
)
1344 && const_int_operand (op1
, mode
)
1345 && ! IS_INT16_CONST (INTVAL (op1
))
1346 && ! IS_HIGH_CONST (INTVAL (op1
)))
1348 emit_insn (gen_loadqi_big_constant (op0
, op1
));
1353 && reg_operand (op0
, mode
)
1354 && const_int_operand (op1
, mode
))
1356 emit_insn (gen_loadhi_big_constant (op0
, op1
));
1360 /* Adjust operands in case we have modified them. */
1364 /* Emit normal pattern. */
1370 c4x_emit_libcall (libcall
, code
, dmode
, smode
, noperands
, operands
)
1373 enum machine_mode dmode
;
1374 enum machine_mode smode
;
1386 ret
= emit_library_call_value (libcall
, NULL_RTX
, 1, dmode
, 1,
1387 operands
[1], smode
);
1388 equiv
= gen_rtx (code
, dmode
, operands
[1]);
1392 ret
= emit_library_call_value (libcall
, NULL_RTX
, 1, dmode
, 2,
1393 operands
[1], smode
, operands
[2], smode
);
1394 equiv
= gen_rtx (code
, dmode
, operands
[1], operands
[2]);
1401 insns
= get_insns ();
1403 emit_libcall_block (insns
, operands
[0], ret
, equiv
);
1408 c4x_emit_libcall3 (libcall
, code
, mode
, operands
)
1411 enum machine_mode mode
;
1414 c4x_emit_libcall (libcall
, code
, mode
, mode
, 3, operands
);
1419 c4x_emit_libcall_mulhi (libcall
, code
, mode
, operands
)
1422 enum machine_mode mode
;
1430 ret
= emit_library_call_value (libcall
, NULL_RTX
, 1, mode
, 2,
1431 operands
[1], mode
, operands
[2], mode
);
1432 equiv
= gen_rtx_TRUNCATE (mode
,
1433 gen_rtx_LSHIFTRT (HImode
,
1434 gen_rtx_MULT (HImode
,
1435 gen_rtx (code
, HImode
, operands
[1]),
1436 gen_rtx (code
, HImode
, operands
[2])),
1438 insns
= get_insns ();
1440 emit_libcall_block (insns
, operands
[0], ret
, equiv
);
1444 /* Set the SYMBOL_REF_FLAG for a function decl. However, wo do not
1445 yet use this info. */
1448 c4x_encode_section_info (decl
, first
)
1450 int first ATTRIBUTE_UNUSED
;
1452 if (TREE_CODE (decl
) == FUNCTION_DECL
)
1453 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl
), 0)) = 1;
1458 c4x_check_legit_addr (mode
, addr
, strict
)
1459 enum machine_mode mode
;
1463 rtx base
= NULL_RTX
; /* Base register (AR0-AR7). */
1464 rtx indx
= NULL_RTX
; /* Index register (IR0,IR1). */
1465 rtx disp
= NULL_RTX
; /* Displacement. */
1468 code
= GET_CODE (addr
);
1471 /* Register indirect with auto increment/decrement. We don't
1472 allow SP here---push_operand should recognize an operand
1473 being pushed on the stack. */
1478 if (mode
!= QImode
&& mode
!= QFmode
)
1482 base
= XEXP (addr
, 0);
1490 rtx op0
= XEXP (addr
, 0);
1491 rtx op1
= XEXP (addr
, 1);
1493 if (mode
!= QImode
&& mode
!= QFmode
)
1497 || (GET_CODE (op1
) != PLUS
&& GET_CODE (op1
) != MINUS
))
1499 base
= XEXP (op1
, 0);
1502 if (REG_P (XEXP (op1
, 1)))
1503 indx
= XEXP (op1
, 1);
1505 disp
= XEXP (op1
, 1);
1509 /* Register indirect. */
1514 /* Register indirect with displacement or index. */
1517 rtx op0
= XEXP (addr
, 0);
1518 rtx op1
= XEXP (addr
, 1);
1519 enum rtx_code code0
= GET_CODE (op0
);
1526 base
= op0
; /* Base + index. */
1528 if (IS_INDEX_REG (base
) || IS_ADDR_REG (indx
))
1536 base
= op0
; /* Base + displacement. */
1547 /* Direct addressing with DP register. */
1550 rtx op0
= XEXP (addr
, 0);
1551 rtx op1
= XEXP (addr
, 1);
1553 /* HImode and HFmode direct memory references aren't truly
1554 offsettable (consider case at end of data page). We
1555 probably get better code by loading a pointer and using an
1556 indirect memory reference. */
1557 if (mode
== HImode
|| mode
== HFmode
)
1560 if (!REG_P (op0
) || REGNO (op0
) != DP_REGNO
)
1563 if ((GET_CODE (op1
) == SYMBOL_REF
|| GET_CODE (op1
) == LABEL_REF
))
1566 if (GET_CODE (op1
) == CONST
)
1572 /* Direct addressing with some work for the assembler... */
1574 /* Direct addressing. */
1577 if (! TARGET_EXPOSE_LDP
&& ! strict
&& mode
!= HFmode
&& mode
!= HImode
)
1579 /* These need to be converted to a LO_SUM (...).
1580 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
1583 /* Do not allow direct memory access to absolute addresses.
1584 This is more pain than it's worth, especially for the
1585 small memory model where we can't guarantee that
1586 this address is within the data page---we don't want
1587 to modify the DP register in the small memory model,
1588 even temporarily, since an interrupt can sneak in.... */
1592 /* Indirect indirect addressing. */
1597 fatal_insn ("using CONST_DOUBLE for address", addr
);
1603 /* Validate the base register. */
1606 /* Check that the address is offsettable for HImode and HFmode. */
1607 if (indx
&& (mode
== HImode
|| mode
== HFmode
))
1610 /* Handle DP based stuff. */
1611 if (REGNO (base
) == DP_REGNO
)
1613 if (strict
&& ! REGNO_OK_FOR_BASE_P (REGNO (base
)))
1615 else if (! strict
&& ! IS_ADDR_OR_PSEUDO_REG (base
))
1619 /* Now validate the index register. */
1622 if (GET_CODE (indx
) != REG
)
1624 if (strict
&& ! REGNO_OK_FOR_INDEX_P (REGNO (indx
)))
1626 else if (! strict
&& ! IS_INDEX_OR_PSEUDO_REG (indx
))
1630 /* Validate displacement. */
1633 if (GET_CODE (disp
) != CONST_INT
)
1635 if (mode
== HImode
|| mode
== HFmode
)
1637 /* The offset displacement must be legitimate. */
1638 if (! IS_DISP8_OFF_CONST (INTVAL (disp
)))
1643 if (! IS_DISP8_CONST (INTVAL (disp
)))
1646 /* Can't add an index with a disp. */
1655 c4x_legitimize_address (orig
, mode
)
1656 rtx orig ATTRIBUTE_UNUSED
;
1657 enum machine_mode mode ATTRIBUTE_UNUSED
;
1659 if (GET_CODE (orig
) == SYMBOL_REF
1660 || GET_CODE (orig
) == LABEL_REF
)
1662 if (mode
== HImode
|| mode
== HFmode
)
1664 /* We need to force the address into
1665 a register so that it is offsettable. */
1666 rtx addr_reg
= gen_reg_rtx (Pmode
);
1667 emit_move_insn (addr_reg
, orig
);
1672 rtx dp_reg
= gen_rtx_REG (Pmode
, DP_REGNO
);
1675 emit_insn (gen_set_ldp (dp_reg
, orig
));
1677 return gen_rtx_LO_SUM (Pmode
, dp_reg
, orig
);
1685 /* Provide the costs of an addressing mode that contains ADDR.
1686 If ADDR is not a valid address, its cost is irrelevant.
1687 This is used in cse and loop optimisation to determine
1688 if it is worthwhile storing a common address into a register.
1689 Unfortunately, the C4x address cost depends on other operands. */
1692 c4x_address_cost (addr
)
1695 switch (GET_CODE (addr
))
1706 /* These shouldn't be directly generated. */
1714 rtx op1
= XEXP (addr
, 1);
1716 if (GET_CODE (op1
) == LABEL_REF
|| GET_CODE (op1
) == SYMBOL_REF
)
1717 return TARGET_SMALL
? 3 : 4;
1719 if (GET_CODE (op1
) == CONST
)
1721 rtx offset
= const0_rtx
;
1723 op1
= eliminate_constant_term (op1
, &offset
);
1725 /* ??? These costs need rethinking... */
1726 if (GET_CODE (op1
) == LABEL_REF
)
1729 if (GET_CODE (op1
) != SYMBOL_REF
)
1732 if (INTVAL (offset
) == 0)
1737 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr
);
1743 register rtx op0
= XEXP (addr
, 0);
1744 register rtx op1
= XEXP (addr
, 1);
1746 if (GET_CODE (op0
) != REG
)
1749 switch (GET_CODE (op1
))
1755 /* This cost for REG+REG must be greater than the cost
1756 for REG if we want autoincrement addressing modes. */
1760 /* The following tries to improve GIV combination
1761 in strength reduce but appears not to help. */
1762 if (TARGET_DEVEL
&& IS_UINT5_CONST (INTVAL (op1
)))
1765 if (IS_DISP1_CONST (INTVAL (op1
)))
1768 if (! TARGET_C3X
&& IS_UINT5_CONST (INTVAL (op1
)))
1783 c4x_gen_compare_reg (code
, x
, y
)
1787 enum machine_mode mode
= SELECT_CC_MODE (code
, x
, y
);
1790 if (mode
== CC_NOOVmode
1791 && (code
== LE
|| code
== GE
|| code
== LT
|| code
== GT
))
1794 cc_reg
= gen_rtx_REG (mode
, ST_REGNO
);
1795 emit_insn (gen_rtx_SET (VOIDmode
, cc_reg
,
1796 gen_rtx_COMPARE (mode
, x
, y
)));
1801 c4x_output_cbranch (form
, seq
)
1810 static char str
[100];
1814 delay
= XVECEXP (final_sequence
, 0, 1);
1815 delayed
= ! INSN_ANNULLED_BRANCH_P (seq
);
1816 annultrue
= INSN_ANNULLED_BRANCH_P (seq
) && ! INSN_FROM_TARGET_P (delay
);
1817 annulfalse
= INSN_ANNULLED_BRANCH_P (seq
) && INSN_FROM_TARGET_P (delay
);
1820 cp
= &str
[strlen (str
)];
1845 c4x_print_operand (file
, op
, letter
)
1846 FILE *file
; /* File to write to. */
1847 rtx op
; /* Operand to print. */
1848 int letter
; /* %<letter> or 0. */
1855 case '#': /* Delayed. */
1857 fprintf (file
, "d");
1861 code
= GET_CODE (op
);
1864 case 'A': /* Direct address. */
1865 if (code
== CONST_INT
|| code
== SYMBOL_REF
|| code
== CONST
)
1866 fprintf (file
, "@");
1869 case 'H': /* Sethi. */
1870 output_addr_const (file
, op
);
1873 case 'I': /* Reversed condition. */
1874 code
= reverse_condition (code
);
1877 case 'L': /* Log 2 of constant. */
1878 if (code
!= CONST_INT
)
1879 fatal_insn ("c4x_print_operand: %%L inconsistency", op
);
1880 fprintf (file
, "%d", exact_log2 (INTVAL (op
)));
1883 case 'N': /* Ones complement of small constant. */
1884 if (code
!= CONST_INT
)
1885 fatal_insn ("c4x_print_operand: %%N inconsistency", op
);
1886 fprintf (file
, "%d", ~INTVAL (op
));
1889 case 'K': /* Generate ldp(k) if direct address. */
1892 && GET_CODE (XEXP (op
, 0)) == LO_SUM
1893 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == REG
1894 && REGNO (XEXP (XEXP (op
, 0), 0)) == DP_REGNO
)
1896 op1
= XEXP (XEXP (op
, 0), 1);
1897 if (GET_CODE(op1
) == CONST_INT
|| GET_CODE(op1
) == SYMBOL_REF
)
1899 fprintf (file
, "\t%s\t@", TARGET_C3X
? "ldp" : "ldpk");
1900 output_address (XEXP (adjust_address (op
, VOIDmode
, 1), 0));
1901 fprintf (file
, "\n");
1906 case 'M': /* Generate ldp(k) if direct address. */
1907 if (! TARGET_SMALL
/* Only used in asm statements. */
1909 && (GET_CODE (XEXP (op
, 0)) == CONST
1910 || GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
))
1912 fprintf (file
, "%s\t@", TARGET_C3X
? "ldp" : "ldpk");
1913 output_address (XEXP (op
, 0));
1914 fprintf (file
, "\n\t");
1918 case 'O': /* Offset address. */
1919 if (code
== MEM
&& c4x_autoinc_operand (op
, Pmode
))
1921 else if (code
== MEM
)
1922 output_address (XEXP (adjust_address (op
, VOIDmode
, 1), 0));
1923 else if (code
== REG
)
1924 fprintf (file
, "%s", reg_names
[REGNO (op
) + 1]);
1926 fatal_insn ("c4x_print_operand: %%O inconsistency", op
);
1929 case 'C': /* Call. */
1932 case 'U': /* Call/callu. */
1933 if (code
!= SYMBOL_REF
)
1934 fprintf (file
, "u");
1944 if (GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
1946 fprintf (file
, "%s", float_reg_names
[REGNO (op
)]);
1948 fprintf (file
, "%s", reg_names
[REGNO (op
)]);
1952 output_address (XEXP (op
, 0));
1960 REAL_VALUE_FROM_CONST_DOUBLE (r
, op
);
1961 REAL_VALUE_TO_DECIMAL (r
, str
, -1);
1962 fprintf (file
, "%s", str
);
1967 fprintf (file
, "%d", INTVAL (op
));
1971 fprintf (file
, "ne");
1975 fprintf (file
, "eq");
1979 fprintf (file
, "ge");
1983 fprintf (file
, "gt");
1987 fprintf (file
, "le");
1991 fprintf (file
, "lt");
1995 fprintf (file
, "hs");
1999 fprintf (file
, "hi");
2003 fprintf (file
, "ls");
2007 fprintf (file
, "lo");
2011 output_addr_const (file
, op
);
2015 output_addr_const (file
, XEXP (op
, 0));
2022 fatal_insn ("c4x_print_operand: Bad operand case", op
);
2029 c4x_print_operand_address (file
, addr
)
2033 switch (GET_CODE (addr
))
2036 fprintf (file
, "*%s", reg_names
[REGNO (addr
)]);
2040 fprintf (file
, "*--%s", reg_names
[REGNO (XEXP (addr
, 0))]);
2044 fprintf (file
, "*%s++", reg_names
[REGNO (XEXP (addr
, 0))]);
2049 rtx op0
= XEXP (XEXP (addr
, 1), 0);
2050 rtx op1
= XEXP (XEXP (addr
, 1), 1);
2052 if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& REG_P (op1
))
2053 fprintf (file
, "*%s++(%s)", reg_names
[REGNO (op0
)],
2054 reg_names
[REGNO (op1
)]);
2055 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) > 0)
2056 fprintf (file
, "*%s++(%d)", reg_names
[REGNO (op0
)],
2058 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) < 0)
2059 fprintf (file
, "*%s--(%d)", reg_names
[REGNO (op0
)],
2061 else if (GET_CODE (XEXP (addr
, 1)) == MINUS
&& REG_P (op1
))
2062 fprintf (file
, "*%s--(%s)", reg_names
[REGNO (op0
)],
2063 reg_names
[REGNO (op1
)]);
2065 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr
);
2071 rtx op0
= XEXP (XEXP (addr
, 1), 0);
2072 rtx op1
= XEXP (XEXP (addr
, 1), 1);
2074 if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& REG_P (op1
))
2075 fprintf (file
, "*++%s(%s)", reg_names
[REGNO (op0
)],
2076 reg_names
[REGNO (op1
)]);
2077 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) > 0)
2078 fprintf (file
, "*++%s(%d)", reg_names
[REGNO (op0
)],
2080 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) < 0)
2081 fprintf (file
, "*--%s(%d)", reg_names
[REGNO (op0
)],
2083 else if (GET_CODE (XEXP (addr
, 1)) == MINUS
&& REG_P (op1
))
2084 fprintf (file
, "*--%s(%s)", reg_names
[REGNO (op0
)],
2085 reg_names
[REGNO (op1
)]);
2087 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr
);
2092 fprintf (file
, "*++%s", reg_names
[REGNO (XEXP (addr
, 0))]);
2096 fprintf (file
, "*%s--", reg_names
[REGNO (XEXP (addr
, 0))]);
2099 case PLUS
: /* Indirect with displacement. */
2101 rtx op0
= XEXP (addr
, 0);
2102 rtx op1
= XEXP (addr
, 1);
2108 if (IS_INDEX_REG (op0
))
2110 fprintf (file
, "*+%s(%s)",
2111 reg_names
[REGNO (op1
)],
2112 reg_names
[REGNO (op0
)]); /* Index + base. */
2116 fprintf (file
, "*+%s(%s)",
2117 reg_names
[REGNO (op0
)],
2118 reg_names
[REGNO (op1
)]); /* Base + index. */
2121 else if (INTVAL (op1
) < 0)
2123 fprintf (file
, "*-%s(%d)",
2124 reg_names
[REGNO (op0
)],
2125 -INTVAL (op1
)); /* Base - displacement. */
2129 fprintf (file
, "*+%s(%d)",
2130 reg_names
[REGNO (op0
)],
2131 INTVAL (op1
)); /* Base + displacement. */
2135 fatal_insn ("c4x_print_operand_address: Bad operand case", addr
);
2141 rtx op0
= XEXP (addr
, 0);
2142 rtx op1
= XEXP (addr
, 1);
2144 if (REG_P (op0
) && REGNO (op0
) == DP_REGNO
)
2145 c4x_print_operand_address (file
, op1
);
2147 fatal_insn ("c4x_print_operand_address: Bad operand case", addr
);
2154 fprintf (file
, "@");
2155 output_addr_const (file
, addr
);
2158 /* We shouldn't access CONST_INT addresses. */
2162 fatal_insn ("c4x_print_operand_address: Bad operand case", addr
);
2168 /* Return nonzero if the floating point operand will fit
2169 in the immediate field. */
2172 c4x_immed_float_p (op
)
2179 REAL_VALUE_FROM_CONST_DOUBLE (r
, op
);
2180 if (GET_MODE (op
) == HFmode
)
2181 REAL_VALUE_TO_TARGET_DOUBLE (r
, convval
);
2184 REAL_VALUE_TO_TARGET_SINGLE (r
, convval
[0]);
2188 /* Sign extend exponent. */
2189 exponent
= (((convval
[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2190 if (exponent
== -128)
2192 if ((convval
[0] & 0x00000fff) != 0 || convval
[1] != 0)
2193 return 0; /* Precision doesn't fit. */
2194 return (exponent
<= 7) /* Positive exp. */
2195 && (exponent
>= -7); /* Negative exp. */
2199 /* The last instruction in a repeat block cannot be a Bcond, DBcound,
2200 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2202 None of the last four instructions from the bottom of the block can
2203 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2204 BcondAT or RETIcondD.
2206 This routine scans the four previous insns for a jump insn, and if
2207 one is found, returns 1 so that we bung in a nop instruction.
2208 This simple minded strategy will add a nop, when it may not
2209 be required. Say when there is a JUMP_INSN near the end of the
2210 block that doesn't get converted into a delayed branch.
2212 Note that we cannot have a call insn, since we don't generate
2213 repeat loops with calls in them (although I suppose we could, but
2214 there's no benefit.)
2216 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
2219 c4x_rptb_nop_p (insn
)
2225 /* Extract the start label from the jump pattern (rptb_end). */
2226 start_label
= XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
), 0, 0)), 1), 0);
2228 /* If there is a label at the end of the loop we must insert
2231 insn
= previous_insn (insn
);
2232 } while (GET_CODE (insn
) == NOTE
2233 || GET_CODE (insn
) == USE
2234 || GET_CODE (insn
) == CLOBBER
);
2235 if (GET_CODE (insn
) == CODE_LABEL
)
2238 for (i
= 0; i
< 4; i
++)
2240 /* Search back for prev non-note and non-label insn. */
2241 while (GET_CODE (insn
) == NOTE
|| GET_CODE (insn
) == CODE_LABEL
2242 || GET_CODE (insn
) == USE
|| GET_CODE (insn
) == CLOBBER
)
2244 if (insn
== start_label
)
2247 insn
= previous_insn (insn
);
2250 /* If we have a jump instruction we should insert a NOP. If we
2251 hit repeat block top we should only insert a NOP if the loop
2253 if (GET_CODE (insn
) == JUMP_INSN
)
2255 insn
= previous_insn (insn
);
2261 /* The C4x looping instruction needs to be emitted at the top of the
2262 loop. Emitting the true RTL for a looping instruction at the top of
2263 the loop can cause problems with flow analysis. So instead, a dummy
2264 doloop insn is emitted at the end of the loop. This routine checks
2265 for the presence of this doloop insn and then searches back to the
2266 top of the loop, where it inserts the true looping insn (provided
2267 there are no instructions in the loop which would cause problems).
2268 Any additional labels can be emitted at this point. In addition, if
2269 the desired loop count register was not allocated, this routine does
2272 Before we can create a repeat block looping instruction we have to
2273 verify that there are no jumps outside the loop and no jumps outside
2274 the loop go into this loop. This can happen in the basic blocks reorder
2275 pass. The C4x cpu can not handle this. */
2278 c4x_label_ref_used_p (x
, code_label
)
2288 code
= GET_CODE (x
);
2289 if (code
== LABEL_REF
)
2290 return INSN_UID (XEXP (x
,0)) == INSN_UID (code_label
);
2292 fmt
= GET_RTX_FORMAT (code
);
2293 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2297 if (c4x_label_ref_used_p (XEXP (x
, i
), code_label
))
2300 else if (fmt
[i
] == 'E')
2301 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2302 if (c4x_label_ref_used_p (XVECEXP (x
, i
, j
), code_label
))
2310 c4x_rptb_valid_p (insn
, start_label
)
2311 rtx insn
, start_label
;
2317 /* Find the start label. */
2318 for (; insn
; insn
= PREV_INSN (insn
))
2319 if (insn
== start_label
)
2322 /* Note found then we can not use a rptb or rpts. The label was
2323 probably moved by the basic block reorder pass. */
2328 /* If any jump jumps inside this block then we must fail. */
2329 for (insn
= PREV_INSN (start
); insn
; insn
= PREV_INSN (insn
))
2331 if (GET_CODE (insn
) == CODE_LABEL
)
2333 for (tmp
= NEXT_INSN (start
); tmp
!= end
; tmp
= NEXT_INSN(tmp
))
2334 if (GET_CODE (tmp
) == JUMP_INSN
2335 && c4x_label_ref_used_p (tmp
, insn
))
2339 for (insn
= NEXT_INSN (end
); insn
; insn
= NEXT_INSN (insn
))
2341 if (GET_CODE (insn
) == CODE_LABEL
)
2343 for (tmp
= NEXT_INSN (start
); tmp
!= end
; tmp
= NEXT_INSN(tmp
))
2344 if (GET_CODE (tmp
) == JUMP_INSN
2345 && c4x_label_ref_used_p (tmp
, insn
))
2349 /* If any jump jumps outside this block then we must fail. */
2350 for (insn
= NEXT_INSN (start
); insn
!= end
; insn
= NEXT_INSN (insn
))
2352 if (GET_CODE (insn
) == CODE_LABEL
)
2354 for (tmp
= NEXT_INSN (end
); tmp
; tmp
= NEXT_INSN(tmp
))
2355 if (GET_CODE (tmp
) == JUMP_INSN
2356 && c4x_label_ref_used_p (tmp
, insn
))
2358 for (tmp
= PREV_INSN (start
); tmp
; tmp
= PREV_INSN(tmp
))
2359 if (GET_CODE (tmp
) == JUMP_INSN
2360 && c4x_label_ref_used_p (tmp
, insn
))
2365 /* All checks OK. */
2371 c4x_rptb_insert (insn
)
2376 rtx new_start_label
;
2379 /* If the count register has not been allocated to RC, say if
2380 there is a movstr pattern in the loop, then do not insert a
2381 RPTB instruction. Instead we emit a decrement and branch
2382 at the end of the loop. */
2383 count_reg
= XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
), 0, 0)), 0), 0);
2384 if (REGNO (count_reg
) != RC_REGNO
)
2387 /* Extract the start label from the jump pattern (rptb_end). */
2388 start_label
= XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
), 0, 0)), 1), 0);
2390 if (! c4x_rptb_valid_p (insn
, start_label
))
2392 /* We can not use the rptb insn. Replace it so reorg can use
2393 the delay slots of the jump insn. */
2394 emit_insn_before (gen_addqi3 (count_reg
, count_reg
, GEN_INT (-1)), insn
);
2395 emit_insn_before (gen_cmpqi (count_reg
, GEN_INT (0)), insn
);
2396 emit_insn_before (gen_bge (start_label
), insn
);
2397 LABEL_NUSES (start_label
)++;
2402 end_label
= gen_label_rtx ();
2403 LABEL_NUSES (end_label
)++;
2404 emit_label_after (end_label
, insn
);
2406 new_start_label
= gen_label_rtx ();
2407 LABEL_NUSES (new_start_label
)++;
2409 for (; insn
; insn
= PREV_INSN (insn
))
2411 if (insn
== start_label
)
2413 if (GET_CODE (insn
) == JUMP_INSN
&&
2414 JUMP_LABEL (insn
) == start_label
)
2415 redirect_jump (insn
, new_start_label
, 0);
2418 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label
);
2420 emit_label_after (new_start_label
, insn
);
2422 if (TARGET_RPTS
&& c4x_rptb_rpts_p (PREV_INSN (insn
), 0))
2423 emit_insn_after (gen_rpts_top (new_start_label
, end_label
), insn
);
2425 emit_insn_after (gen_rptb_top (new_start_label
, end_label
), insn
);
2426 if (LABEL_NUSES (start_label
) == 0)
2427 delete_insn (start_label
);
2431 /* This function is a C4x special called immediately before delayed
2432 branch scheduling. We fix up RTPB style loops that didn't get RC
2433 allocated as the loop counter. */
2436 c4x_process_after_reload (first
)
2441 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
2443 /* Look for insn. */
2446 int insn_code_number
;
2449 insn_code_number
= recog_memoized (insn
);
2451 if (insn_code_number
< 0)
2454 /* Insert the RTX for RPTB at the top of the loop
2455 and a label at the end of the loop. */
2456 if (insn_code_number
== CODE_FOR_rptb_end
)
2457 c4x_rptb_insert(insn
);
2459 /* We need to split the insn here. Otherwise the calls to
2460 force_const_mem will not work for load_immed_address. */
2463 /* Don't split the insn if it has been deleted. */
2464 if (! INSN_DELETED_P (old
))
2465 insn
= try_split (PATTERN(old
), old
, 1);
2467 /* When not optimizing, the old insn will be still left around
2468 with only the 'deleted' bit set. Transform it into a note
2469 to avoid confusion of subsequent processing. */
2470 if (INSN_DELETED_P (old
))
2472 PUT_CODE (old
, NOTE
);
2473 NOTE_LINE_NUMBER (old
) = NOTE_INSN_DELETED
;
2474 NOTE_SOURCE_FILE (old
) = 0;
2485 return REG_P (op
) && IS_ADDR_OR_PSEUDO_REG (op
);
2493 return REG_P (op
) && IS_INDEX_OR_PSEUDO_REG (op
);
2498 c4x_immed_int_constant (op
)
2501 if (GET_CODE (op
) != CONST_INT
)
2504 return GET_MODE (op
) == VOIDmode
2505 || GET_MODE_CLASS (op
) == MODE_INT
2506 || GET_MODE_CLASS (op
) == MODE_PARTIAL_INT
;
2511 c4x_immed_float_constant (op
)
2514 if (GET_CODE (op
) != CONST_DOUBLE
)
2517 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2518 present this only means that a MEM rtx has been generated. It does
2519 not mean the rtx is really in memory. */
2521 return GET_MODE (op
) == QFmode
|| GET_MODE (op
) == HFmode
;
2526 c4x_shiftable_constant (op
)
2531 int val
= INTVAL (op
);
2533 for (i
= 0; i
< 16; i
++)
2538 mask
= ((0xffff >> i
) << 16) | 0xffff;
2539 if (IS_INT16_CONST (val
& (1 << 31) ? (val
>> i
) | ~mask
2540 : (val
>> i
) & mask
))
2550 return c4x_immed_float_constant (op
) && c4x_immed_float_p (op
);
2558 return c4x_immed_int_constant (op
) && IS_INT16_CONST (INTVAL (op
));
2568 return c4x_immed_int_constant (op
) && IS_INT8_CONST (INTVAL (op
));
2576 if (TARGET_C3X
|| ! c4x_immed_int_constant (op
))
2578 return IS_INT5_CONST (INTVAL (op
));
2586 return c4x_immed_int_constant (op
) && IS_UINT16_CONST (INTVAL (op
));
2594 return c4x_immed_int_constant (op
) && IS_NOT_UINT16_CONST (INTVAL (op
));
2602 return c4x_immed_int_constant (op
) && IS_HIGH_CONST (INTVAL (op
));
2606 /* The constraints do not have to check the register class,
2607 except when needed to discriminate between the constraints.
2608 The operand has been checked by the predicates to be valid. */
2610 /* ARx + 9-bit signed const or IRn
2611 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-Arx(IRn) for -256 < n < 256
2612 We don't include the pre/post inc/dec forms here since
2613 they are handled by the <> constraints. */
2616 c4x_Q_constraint (op
)
2619 enum machine_mode mode
= GET_MODE (op
);
2621 if (GET_CODE (op
) != MEM
)
2624 switch (GET_CODE (op
))
2631 rtx op0
= XEXP (op
, 0);
2632 rtx op1
= XEXP (op
, 1);
2640 if (GET_CODE (op1
) != CONST_INT
)
2643 /* HImode and HFmode must be offsettable. */
2644 if (mode
== HImode
|| mode
== HFmode
)
2645 return IS_DISP8_OFF_CONST (INTVAL (op1
));
2647 return IS_DISP8_CONST (INTVAL (op1
));
2658 /* ARx + 5-bit unsigned const
2659 *ARx, *+ARx(n) for n < 32. */
2662 c4x_R_constraint (op
)
2665 enum machine_mode mode
= GET_MODE (op
);
2669 if (GET_CODE (op
) != MEM
)
2672 switch (GET_CODE (op
))
2679 rtx op0
= XEXP (op
, 0);
2680 rtx op1
= XEXP (op
, 1);
2685 if (GET_CODE (op1
) != CONST_INT
)
2688 /* HImode and HFmode must be offsettable. */
2689 if (mode
== HImode
|| mode
== HFmode
)
2690 return IS_UINT5_CONST (INTVAL (op1
) + 1);
2692 return IS_UINT5_CONST (INTVAL (op1
));
2707 enum machine_mode mode
= GET_MODE (op
);
2709 if (TARGET_C3X
|| GET_CODE (op
) != MEM
)
2713 switch (GET_CODE (op
))
2716 return IS_ADDR_OR_PSEUDO_REG (op
);
2720 rtx op0
= XEXP (op
, 0);
2721 rtx op1
= XEXP (op
, 1);
2723 /* HImode and HFmode must be offsettable. */
2724 if (mode
== HImode
|| mode
== HFmode
)
2725 return IS_ADDR_OR_PSEUDO_REG (op0
)
2726 && GET_CODE (op1
) == CONST_INT
2727 && IS_UINT5_CONST (INTVAL (op1
) + 1);
2730 && IS_ADDR_OR_PSEUDO_REG (op0
)
2731 && GET_CODE (op1
) == CONST_INT
2732 && IS_UINT5_CONST (INTVAL (op1
));
2743 /* ARx + 1-bit unsigned const or IRn
2744 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-Arx(IRn)
2745 We don't include the pre/post inc/dec forms here since
2746 they are handled by the <> constraints. */
2749 c4x_S_constraint (op
)
2752 enum machine_mode mode
= GET_MODE (op
);
2753 if (GET_CODE (op
) != MEM
)
2756 switch (GET_CODE (op
))
2764 rtx op0
= XEXP (op
, 0);
2765 rtx op1
= XEXP (op
, 1);
2767 if ((GET_CODE (op1
) != PLUS
&& GET_CODE (op1
) != MINUS
)
2768 || (op0
!= XEXP (op1
, 0)))
2771 op0
= XEXP (op1
, 0);
2772 op1
= XEXP (op1
, 1);
2773 return REG_P (op0
) && REG_P (op1
);
2774 /* Pre or post_modify with a displacement of 0 or 1
2775 should not be generated. */
2781 rtx op0
= XEXP (op
, 0);
2782 rtx op1
= XEXP (op
, 1);
2790 if (GET_CODE (op1
) != CONST_INT
)
2793 /* HImode and HFmode must be offsettable. */
2794 if (mode
== HImode
|| mode
== HFmode
)
2795 return IS_DISP1_OFF_CONST (INTVAL (op1
));
2797 return IS_DISP1_CONST (INTVAL (op1
));
2812 enum machine_mode mode
= GET_MODE (op
);
2813 if (GET_CODE (op
) != MEM
)
2817 switch (GET_CODE (op
))
2821 if (mode
!= QImode
&& mode
!= QFmode
)
2828 return IS_ADDR_OR_PSEUDO_REG (op
);
2833 rtx op0
= XEXP (op
, 0);
2834 rtx op1
= XEXP (op
, 1);
2836 if (mode
!= QImode
&& mode
!= QFmode
)
2839 if ((GET_CODE (op1
) != PLUS
&& GET_CODE (op1
) != MINUS
)
2840 || (op0
!= XEXP (op1
, 0)))
2843 op0
= XEXP (op1
, 0);
2844 op1
= XEXP (op1
, 1);
2845 return REG_P (op0
) && IS_ADDR_OR_PSEUDO_REG (op0
)
2846 && REG_P (op1
) && IS_INDEX_OR_PSEUDO_REG (op1
);
2847 /* Pre or post_modify with a displacement of 0 or 1
2848 should not be generated. */
2853 rtx op0
= XEXP (op
, 0);
2854 rtx op1
= XEXP (op
, 1);
2858 /* HImode and HFmode must be offsettable. */
2859 if (mode
== HImode
|| mode
== HFmode
)
2860 return IS_ADDR_OR_PSEUDO_REG (op0
)
2861 && GET_CODE (op1
) == CONST_INT
2862 && IS_DISP1_OFF_CONST (INTVAL (op1
));
2865 return (IS_INDEX_OR_PSEUDO_REG (op1
)
2866 && IS_ADDR_OR_PSEUDO_REG (op0
))
2867 || (IS_ADDR_OR_PSEUDO_REG (op1
)
2868 && IS_INDEX_OR_PSEUDO_REG (op0
));
2870 return IS_ADDR_OR_PSEUDO_REG (op0
)
2871 && GET_CODE (op1
) == CONST_INT
2872 && IS_DISP1_CONST (INTVAL (op1
));
2884 /* Direct memory operand. */
2887 c4x_T_constraint (op
)
2890 if (GET_CODE (op
) != MEM
)
2894 if (GET_CODE (op
) != LO_SUM
)
2896 /* Allow call operands. */
2897 return GET_CODE (op
) == SYMBOL_REF
2898 && GET_MODE (op
) == Pmode
2899 && SYMBOL_REF_FLAG (op
);
2902 /* HImode and HFmode are not offsettable. */
2903 if (GET_MODE (op
) == HImode
|| GET_CODE (op
) == HFmode
)
2906 if ((GET_CODE (XEXP (op
, 0)) == REG
)
2907 && (REGNO (XEXP (op
, 0)) == DP_REGNO
))
2908 return c4x_U_constraint (XEXP (op
, 1));
2914 /* Symbolic operand. */
2917 c4x_U_constraint (op
)
2920 /* Don't allow direct addressing to an arbitrary constant. */
2921 return GET_CODE (op
) == CONST
2922 || GET_CODE (op
) == SYMBOL_REF
2923 || GET_CODE (op
) == LABEL_REF
;
2928 c4x_autoinc_operand (op
, mode
)
2930 enum machine_mode mode ATTRIBUTE_UNUSED
;
2932 if (GET_CODE (op
) == MEM
)
2934 enum rtx_code code
= GET_CODE (XEXP (op
, 0));
2940 || code
== PRE_MODIFY
2941 || code
== POST_MODIFY
2949 /* Match any operand. */
2952 any_operand (op
, mode
)
2953 register rtx op ATTRIBUTE_UNUSED
;
2954 enum machine_mode mode ATTRIBUTE_UNUSED
;
2960 /* Nonzero if OP is a floating point value with value 0.0. */
2963 fp_zero_operand (op
, mode
)
2965 enum machine_mode mode ATTRIBUTE_UNUSED
;
2969 if (GET_CODE (op
) != CONST_DOUBLE
)
2971 REAL_VALUE_FROM_CONST_DOUBLE (r
, op
);
2972 return REAL_VALUES_EQUAL (r
, dconst0
);
2977 const_operand (op
, mode
)
2979 register enum machine_mode mode
;
2985 if (GET_CODE (op
) != CONST_DOUBLE
2986 || GET_MODE (op
) != mode
2987 || GET_MODE_CLASS (mode
) != MODE_FLOAT
)
2990 return c4x_immed_float_p (op
);
2996 if (GET_CODE (op
) == CONSTANT_P_RTX
)
2999 if (GET_CODE (op
) != CONST_INT
3000 || (GET_MODE (op
) != VOIDmode
&& GET_MODE (op
) != mode
)
3001 || GET_MODE_CLASS (mode
) != MODE_INT
)
3004 return IS_HIGH_CONST (INTVAL (op
)) || IS_INT16_CONST (INTVAL (op
));
3016 stik_const_operand (op
, mode
)
3018 enum machine_mode mode ATTRIBUTE_UNUSED
;
3020 return c4x_K_constant (op
);
3025 not_const_operand (op
, mode
)
3027 enum machine_mode mode ATTRIBUTE_UNUSED
;
3029 return c4x_N_constant (op
);
3034 reg_operand (op
, mode
)
3036 enum machine_mode mode
;
3038 if (GET_CODE (op
) == SUBREG
3039 && GET_MODE (op
) == QFmode
)
3041 return register_operand (op
, mode
);
3046 mixed_subreg_operand (op
, mode
)
3048 enum machine_mode mode ATTRIBUTE_UNUSED
;
3050 /* Allow (subreg:HF (reg:HI)) that be generated for a union of an
3051 int and a long double. */
3052 if (GET_CODE (op
) == SUBREG
3053 && (GET_MODE (op
) == QFmode
)
3054 && (GET_MODE (SUBREG_REG (op
)) == QImode
3055 || GET_MODE (SUBREG_REG (op
)) == HImode
))
3062 reg_imm_operand (op
, mode
)
3064 enum machine_mode mode ATTRIBUTE_UNUSED
;
3066 if (REG_P (op
) || CONSTANT_P (op
))
3073 not_modify_reg (op
, mode
)
3075 enum machine_mode mode ATTRIBUTE_UNUSED
;
3077 if (REG_P (op
) || CONSTANT_P (op
))
3079 if (GET_CODE (op
) != MEM
)
3082 switch (GET_CODE (op
))
3089 rtx op0
= XEXP (op
, 0);
3090 rtx op1
= XEXP (op
, 1);
3095 if (REG_P (op1
) || GET_CODE (op1
) == CONST_INT
)
3101 rtx op0
= XEXP (op
, 0);
3103 if (REG_P (op0
) && REGNO (op0
) == DP_REGNO
)
3121 not_rc_reg (op
, mode
)
3123 enum machine_mode mode ATTRIBUTE_UNUSED
;
3125 if (REG_P (op
) && REGNO (op
) == RC_REGNO
)
3131 /* Extended precision register R0-R1. */
3134 r0r1_reg_operand (op
, mode
)
3136 enum machine_mode mode
;
3138 if (! reg_operand (op
, mode
))
3140 if (GET_CODE (op
) == SUBREG
)
3141 op
= SUBREG_REG (op
);
3142 return REG_P (op
) && IS_R0R1_OR_PSEUDO_REG (op
);
3146 /* Extended precision register R2-R3. */
3149 r2r3_reg_operand (op
, mode
)
3151 enum machine_mode mode
;
3153 if (! reg_operand (op
, mode
))
3155 if (GET_CODE (op
) == SUBREG
)
3156 op
= SUBREG_REG (op
);
3157 return REG_P (op
) && IS_R2R3_OR_PSEUDO_REG (op
);
3161 /* Low extended precision register R0-R7. */
3164 ext_low_reg_operand (op
, mode
)
3166 enum machine_mode mode
;
3168 if (! reg_operand (op
, mode
))
3170 if (GET_CODE (op
) == SUBREG
)
3171 op
= SUBREG_REG (op
);
3172 return REG_P (op
) && IS_EXT_LOW_OR_PSEUDO_REG (op
);
3176 /* Extended precision register. */
3179 ext_reg_operand (op
, mode
)
3181 enum machine_mode mode
;
3183 if (! reg_operand (op
, mode
))
3185 if (GET_CODE (op
) == SUBREG
)
3186 op
= SUBREG_REG (op
);
3189 return IS_EXT_OR_PSEUDO_REG (op
);
3193 /* Standard precision register. */
3196 std_reg_operand (op
, mode
)
3198 enum machine_mode mode
;
3200 if (! reg_operand (op
, mode
))
3202 if (GET_CODE (op
) == SUBREG
)
3203 op
= SUBREG_REG (op
);
3204 return REG_P (op
) && IS_STD_OR_PSEUDO_REG (op
);
3207 /* Standard precision or normal register. */
3210 std_or_reg_operand (op
, mode
)
3212 enum machine_mode mode
;
3214 if (reload_in_progress
)
3215 return std_reg_operand (op
, mode
);
3216 return reg_operand (op
, mode
);
3219 /* Address register. */
3222 addr_reg_operand (op
, mode
)
3224 enum machine_mode mode
;
3226 if (! reg_operand (op
, mode
))
3228 return c4x_a_register (op
);
3232 /* Index register. */
3235 index_reg_operand (op
, mode
)
3237 enum machine_mode mode
;
3239 if (! reg_operand (op
, mode
))
3241 if (GET_CODE (op
) == SUBREG
)
3242 op
= SUBREG_REG (op
);
3243 return c4x_x_register (op
);
3250 dp_reg_operand (op
, mode
)
3252 enum machine_mode mode ATTRIBUTE_UNUSED
;
3254 return REG_P (op
) && IS_DP_OR_PSEUDO_REG (op
);
3261 sp_reg_operand (op
, mode
)
3263 enum machine_mode mode ATTRIBUTE_UNUSED
;
3265 return REG_P (op
) && IS_SP_OR_PSEUDO_REG (op
);
3272 st_reg_operand (op
, mode
)
3274 enum machine_mode mode ATTRIBUTE_UNUSED
;
3276 return REG_P (op
) && IS_ST_OR_PSEUDO_REG (op
);
3283 rc_reg_operand (op
, mode
)
3285 enum machine_mode mode ATTRIBUTE_UNUSED
;
3287 return REG_P (op
) && IS_RC_OR_PSEUDO_REG (op
);
3292 call_address_operand (op
, mode
)
3294 enum machine_mode mode ATTRIBUTE_UNUSED
;
3296 return (REG_P (op
) || symbolic_address_operand (op
, mode
));
3300 /* Symbolic address operand. */
3303 symbolic_address_operand (op
, mode
)
3305 enum machine_mode mode ATTRIBUTE_UNUSED
;
3307 switch (GET_CODE (op
))
3319 /* Check dst operand of a move instruction. */
3322 dst_operand (op
, mode
)
3324 enum machine_mode mode
;
3326 if (GET_CODE (op
) == SUBREG
3327 && mixed_subreg_operand (op
, mode
))
3331 return reg_operand (op
, mode
);
3333 return nonimmediate_operand (op
, mode
);
3337 /* Check src operand of two operand arithmetic instructions. */
3340 src_operand (op
, mode
)
3342 enum machine_mode mode
;
3344 if (GET_CODE (op
) == SUBREG
3345 && mixed_subreg_operand (op
, mode
))
3349 return reg_operand (op
, mode
);
3351 if (mode
== VOIDmode
)
3352 mode
= GET_MODE (op
);
3354 if (GET_CODE (op
) == CONST_INT
)
3355 return (mode
== QImode
|| mode
== Pmode
|| mode
== HImode
)
3356 && c4x_I_constant (op
);
3358 /* We don't like CONST_DOUBLE integers. */
3359 if (GET_CODE (op
) == CONST_DOUBLE
)
3360 return c4x_H_constant (op
);
3362 /* Disallow symbolic addresses. Only the predicate
3363 symbolic_address_operand will match these. */
3364 if (GET_CODE (op
) == SYMBOL_REF
3365 || GET_CODE (op
) == LABEL_REF
3366 || GET_CODE (op
) == CONST
)
3369 /* If TARGET_LOAD_DIRECT_MEMS is nonzero, disallow direct memory
3370 access to symbolic addresses. These operands will get forced
3371 into a register and the movqi expander will generate a
3372 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is nonzero. */
3373 if (GET_CODE (op
) == MEM
3374 && ((GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
3375 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
3376 || GET_CODE (XEXP (op
, 0)) == CONST
)))
3377 return ! TARGET_LOAD_DIRECT_MEMS
&& GET_MODE (op
) == mode
;
3379 return general_operand (op
, mode
);
3384 src_hi_operand (op
, mode
)
3386 enum machine_mode mode
;
3388 if (c4x_O_constant (op
))
3390 return src_operand (op
, mode
);
3394 /* Check src operand of two operand logical instructions. */
3397 lsrc_operand (op
, mode
)
3399 enum machine_mode mode
;
3401 if (mode
== VOIDmode
)
3402 mode
= GET_MODE (op
);
3404 if (mode
!= QImode
&& mode
!= Pmode
)
3405 fatal_insn ("mode not QImode", op
);
3407 if (GET_CODE (op
) == CONST_INT
)
3408 return c4x_L_constant (op
) || c4x_J_constant (op
);
3410 return src_operand (op
, mode
);
3414 /* Check src operand of two operand tricky instructions. */
3417 tsrc_operand (op
, mode
)
3419 enum machine_mode mode
;
3421 if (mode
== VOIDmode
)
3422 mode
= GET_MODE (op
);
3424 if (mode
!= QImode
&& mode
!= Pmode
)
3425 fatal_insn ("mode not QImode", op
);
3427 if (GET_CODE (op
) == CONST_INT
)
3428 return c4x_L_constant (op
) || c4x_N_constant (op
) || c4x_J_constant (op
);
3430 return src_operand (op
, mode
);
3434 /* Check src operand of two operand non immedidate instructions. */
3437 nonimmediate_src_operand (op
, mode
)
3439 enum machine_mode mode
;
3441 if (GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
3444 return src_operand (op
, mode
);
3448 /* Check logical src operand of two operand non immedidate instructions. */
3451 nonimmediate_lsrc_operand (op
, mode
)
3453 enum machine_mode mode
;
3455 if (GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
3458 return lsrc_operand (op
, mode
);
3463 reg_or_const_operand (op
, mode
)
3465 enum machine_mode mode
;
3467 return reg_operand (op
, mode
) || const_operand (op
, mode
);
3471 /* Check for indirect operands allowable in parallel instruction. */
3474 par_ind_operand (op
, mode
)
3476 enum machine_mode mode
;
3478 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3481 return c4x_S_indirect (op
);
3485 /* Check for operands allowable in parallel instruction. */
3488 parallel_operand (op
, mode
)
3490 enum machine_mode mode
;
3492 return ext_low_reg_operand (op
, mode
) || par_ind_operand (op
, mode
);
3497 c4x_S_address_parse (op
, base
, incdec
, index
, disp
)
3509 if (GET_CODE (op
) != MEM
)
3510 fatal_insn ("invalid indirect memory address", op
);
3513 switch (GET_CODE (op
))
3516 *base
= REGNO (XEXP (op
, 0));
3522 *base
= REGNO (XEXP (op
, 0));
3528 *base
= REGNO (XEXP (op
, 0));
3534 *base
= REGNO (XEXP (op
, 0));
3540 *base
= REGNO (XEXP (op
, 0));
3541 if (REG_P (XEXP (XEXP (op
, 1), 1)))
3543 *index
= REGNO (XEXP (XEXP (op
, 1), 1));
3544 *disp
= 0; /* ??? */
3547 *disp
= INTVAL (XEXP (XEXP (op
, 1), 1));
3552 *base
= REGNO (XEXP (op
, 0));
3553 if (REG_P (XEXP (XEXP (op
, 1), 1)))
3555 *index
= REGNO (XEXP (XEXP (op
, 1), 1));
3556 *disp
= 1; /* ??? */
3559 *disp
= INTVAL (XEXP (XEXP (op
, 1), 1));
3570 rtx op0
= XEXP (op
, 0);
3571 rtx op1
= XEXP (op
, 1);
3573 if (c4x_a_register (op0
))
3575 if (c4x_x_register (op1
))
3577 *base
= REGNO (op0
);
3578 *index
= REGNO (op1
);
3581 else if ((GET_CODE (op1
) == CONST_INT
3582 && IS_DISP1_CONST (INTVAL (op1
))))
3584 *base
= REGNO (op0
);
3585 *disp
= INTVAL (op1
);
3589 else if (c4x_x_register (op0
) && c4x_a_register (op1
))
3591 *base
= REGNO (op1
);
3592 *index
= REGNO (op0
);
3599 fatal_insn ("invalid indirect (S) memory address", op
);
3605 c4x_address_conflict (op0
, op1
, store0
, store1
)
3620 if (MEM_VOLATILE_P (op0
) && MEM_VOLATILE_P (op1
))
3623 c4x_S_address_parse (op0
, &base0
, &incdec0
, &index0
, &disp0
);
3624 c4x_S_address_parse (op1
, &base1
, &incdec1
, &index1
, &disp1
);
3626 if (store0
&& store1
)
3628 /* If we have two stores in parallel to the same address, then
3629 the C4x only executes one of the stores. This is unlikely to
3630 cause problems except when writing to a hardware device such
3631 as a FIFO since the second write will be lost. The user
3632 should flag the hardware location as being volatile so that
3633 we don't do this optimisation. While it is unlikely that we
3634 have an aliased address if both locations are not marked
3635 volatile, it is probably safer to flag a potential conflict
3636 if either location is volatile. */
3637 if (! flag_argument_noalias
)
3639 if (MEM_VOLATILE_P (op0
) || MEM_VOLATILE_P (op1
))
3644 /* If have a parallel load and a store to the same address, the load
3645 is performed first, so there is no conflict. Similarly, there is
3646 no conflict if have parallel loads from the same address. */
3648 /* Cannot use auto increment or auto decrement twice for same
3650 if (base0
== base1
&& incdec0
&& incdec0
)
3653 /* It might be too confusing for GCC if we have use a base register
3654 with a side effect and a memory reference using the same register
3656 if (! TARGET_DEVEL
&& base0
== base1
&& (incdec0
|| incdec1
))
3659 /* We can not optimize the case where op1 and op2 refer to the same
3661 if (base0
== base1
&& disp0
== disp1
&& index0
== index1
)
3669 /* Check for while loop inside a decrement and branch loop. */
3672 c4x_label_conflict (insn
, jump
, db
)
3679 if (GET_CODE (insn
) == CODE_LABEL
)
3681 if (CODE_LABEL_NUMBER (jump
) == CODE_LABEL_NUMBER (insn
))
3683 if (CODE_LABEL_NUMBER (db
) == CODE_LABEL_NUMBER (insn
))
3686 insn
= PREV_INSN (insn
);
3692 /* Validate combination of operands for parallel load/store instructions. */
3695 valid_parallel_load_store (operands
, mode
)
3697 enum machine_mode mode ATTRIBUTE_UNUSED
;
3699 rtx op0
= operands
[0];
3700 rtx op1
= operands
[1];
3701 rtx op2
= operands
[2];
3702 rtx op3
= operands
[3];
3704 if (GET_CODE (op0
) == SUBREG
)
3705 op0
= SUBREG_REG (op0
);
3706 if (GET_CODE (op1
) == SUBREG
)
3707 op1
= SUBREG_REG (op1
);
3708 if (GET_CODE (op2
) == SUBREG
)
3709 op2
= SUBREG_REG (op2
);
3710 if (GET_CODE (op3
) == SUBREG
)
3711 op3
= SUBREG_REG (op3
);
3713 /* The patterns should only allow ext_low_reg_operand() or
3714 par_ind_operand() operands. Thus of the 4 operands, only 2
3715 should be REGs and the other 2 should be MEMs. */
3717 /* This test prevents the multipack pass from using this pattern if
3718 op0 is used as an index or base register in op2 or op3, since
3719 this combination will require reloading. */
3720 if (GET_CODE (op0
) == REG
3721 && ((GET_CODE (op2
) == MEM
&& reg_mentioned_p (op0
, XEXP (op2
, 0)))
3722 || (GET_CODE (op3
) == MEM
&& reg_mentioned_p (op0
, XEXP (op3
, 0)))))
3726 if (GET_CODE (op0
) == REG
&& GET_CODE (op2
) == REG
)
3727 return (REGNO (op0
) != REGNO (op2
))
3728 && GET_CODE (op1
) == MEM
&& GET_CODE (op3
) == MEM
3729 && ! c4x_address_conflict (op1
, op3
, 0, 0);
3732 if (GET_CODE (op1
) == REG
&& GET_CODE (op3
) == REG
)
3733 return GET_CODE (op0
) == MEM
&& GET_CODE (op2
) == MEM
3734 && ! c4x_address_conflict (op0
, op2
, 1, 1);
3737 if (GET_CODE (op0
) == REG
&& GET_CODE (op3
) == REG
)
3738 return GET_CODE (op1
) == MEM
&& GET_CODE (op2
) == MEM
3739 && ! c4x_address_conflict (op1
, op2
, 0, 1);
3742 if (GET_CODE (op1
) == REG
&& GET_CODE (op2
) == REG
)
3743 return GET_CODE (op0
) == MEM
&& GET_CODE (op3
) == MEM
3744 && ! c4x_address_conflict (op0
, op3
, 1, 0);
3751 valid_parallel_operands_4 (operands
, mode
)
3753 enum machine_mode mode ATTRIBUTE_UNUSED
;
3755 rtx op0
= operands
[0];
3756 rtx op2
= operands
[2];
3758 if (GET_CODE (op0
) == SUBREG
)
3759 op0
= SUBREG_REG (op0
);
3760 if (GET_CODE (op2
) == SUBREG
)
3761 op2
= SUBREG_REG (op2
);
3763 /* This test prevents the multipack pass from using this pattern if
3764 op0 is used as an index or base register in op2, since this combination
3765 will require reloading. */
3766 if (GET_CODE (op0
) == REG
3767 && GET_CODE (op2
) == MEM
3768 && reg_mentioned_p (op0
, XEXP (op2
, 0)))
3776 valid_parallel_operands_5 (operands
, mode
)
3778 enum machine_mode mode ATTRIBUTE_UNUSED
;
3781 rtx op0
= operands
[0];
3782 rtx op1
= operands
[1];
3783 rtx op2
= operands
[2];
3784 rtx op3
= operands
[3];
3786 if (GET_CODE (op0
) == SUBREG
)
3787 op0
= SUBREG_REG (op0
);
3788 if (GET_CODE (op1
) == SUBREG
)
3789 op1
= SUBREG_REG (op1
);
3790 if (GET_CODE (op2
) == SUBREG
)
3791 op2
= SUBREG_REG (op2
);
3793 /* The patterns should only allow ext_low_reg_operand() or
3794 par_ind_operand() operands. Operands 1 and 2 may be commutative
3795 but only one of them can be a register. */
3796 if (GET_CODE (op1
) == REG
)
3798 if (GET_CODE (op2
) == REG
)
3804 /* This test prevents the multipack pass from using this pattern if
3805 op0 is used as an index or base register in op3, since this combination
3806 will require reloading. */
3807 if (GET_CODE (op0
) == REG
3808 && GET_CODE (op3
) == MEM
3809 && reg_mentioned_p (op0
, XEXP (op3
, 0)))
3817 valid_parallel_operands_6 (operands
, mode
)
3819 enum machine_mode mode ATTRIBUTE_UNUSED
;
3822 rtx op0
= operands
[0];
3823 rtx op1
= operands
[1];
3824 rtx op2
= operands
[2];
3825 rtx op4
= operands
[4];
3826 rtx op5
= operands
[5];
3828 if (GET_CODE (op1
) == SUBREG
)
3829 op1
= SUBREG_REG (op1
);
3830 if (GET_CODE (op2
) == SUBREG
)
3831 op2
= SUBREG_REG (op2
);
3832 if (GET_CODE (op4
) == SUBREG
)
3833 op4
= SUBREG_REG (op4
);
3834 if (GET_CODE (op5
) == SUBREG
)
3835 op5
= SUBREG_REG (op5
);
3837 /* The patterns should only allow ext_low_reg_operand() or
3838 par_ind_operand() operands. Thus of the 4 input operands, only 2
3839 should be REGs and the other 2 should be MEMs. */
3841 if (GET_CODE (op1
) == REG
)
3843 if (GET_CODE (op2
) == REG
)
3845 if (GET_CODE (op4
) == REG
)
3847 if (GET_CODE (op5
) == REG
)
3850 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3851 Perhaps we should count the MEMs as well? */
3855 /* This test prevents the multipack pass from using this pattern if
3856 op0 is used as an index or base register in op4 or op5, since
3857 this combination will require reloading. */
3858 if (GET_CODE (op0
) == REG
3859 && ((GET_CODE (op4
) == MEM
&& reg_mentioned_p (op0
, XEXP (op4
, 0)))
3860 || (GET_CODE (op5
) == MEM
&& reg_mentioned_p (op0
, XEXP (op5
, 0)))))
3867 /* Validate combination of src operands. Note that the operands have
3868 been screened by the src_operand predicate. We just have to check
3869 that the combination of operands is valid. If FORCE is set, ensure
3870 that the destination regno is valid if we have a 2 operand insn. */
3873 c4x_valid_operands (code
, operands
, mode
, force
)
3876 enum machine_mode mode ATTRIBUTE_UNUSED
;
3881 enum rtx_code code1
;
3882 enum rtx_code code2
;
3884 if (code
== COMPARE
)
3895 if (GET_CODE (op1
) == SUBREG
)
3896 op1
= SUBREG_REG (op1
);
3897 if (GET_CODE (op2
) == SUBREG
)
3898 op2
= SUBREG_REG (op2
);
3900 code1
= GET_CODE (op1
);
3901 code2
= GET_CODE (op2
);
3903 if (code1
== REG
&& code2
== REG
)
3906 if (code1
== MEM
&& code2
== MEM
)
3908 if (c4x_S_indirect (op1
) && c4x_S_indirect (op2
))
3910 return c4x_R_indirect (op1
) && c4x_R_indirect (op2
);
3921 if (c4x_J_constant (op2
) && c4x_R_indirect (op1
))
3926 if (! c4x_H_constant (op2
))
3930 /* Any valid memory operand screened by src_operand is OK. */
3933 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3934 into a stack slot memory address comprising a PLUS and a
3940 fatal_insn ("c4x_valid_operands: Internal error", op2
);
3944 /* Check that we have a valid destination register for a two operand
3946 return ! force
|| code
== COMPARE
|| REGNO (op1
) == REGNO (operands
[0]);
3949 /* We assume MINUS is commutative since the subtract patterns
3950 also support the reverse subtract instructions. Since op1
3951 is not a register, and op2 is a register, op1 can only
3952 be a restricted memory operand for a shift instruction. */
3953 if (code
== ASHIFTRT
|| code
== LSHIFTRT
3954 || code
== ASHIFT
|| code
== COMPARE
)
3956 && (c4x_S_indirect (op1
) || c4x_R_indirect (op1
));
3961 if (c4x_J_constant (op1
) && c4x_R_indirect (op2
))
3966 if (! c4x_H_constant (op1
))
3970 /* Any valid memory operand screened by src_operand is OK. */
3978 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3979 into a stack slot memory address comprising a PLUS and a
3989 /* Check that we have a valid destination register for a two operand
3991 return ! force
|| REGNO (op1
) == REGNO (operands
[0]);
3995 int valid_operands (code
, operands
, mode
)
3998 enum machine_mode mode
;
4001 /* If we are not optimizing then we have to let anything go and let
4002 reload fix things up. instantiate_decl in function.c can produce
4003 invalid insns by changing the offset of a memory operand from a
4004 valid one into an invalid one, when the second operand is also a
4005 memory operand. The alternative is not to allow two memory
4006 operands for an insn when not optimizing. The problem only rarely
4007 occurs, for example with the C-torture program DFcmp.c. */
4009 return ! optimize
|| c4x_valid_operands (code
, operands
, mode
, 0);
4014 legitimize_operands (code
, operands
, mode
)
4017 enum machine_mode mode
;
4019 /* Compare only has 2 operands. */
4020 if (code
== COMPARE
)
4022 /* During RTL generation, force constants into pseudos so that
4023 they can get hoisted out of loops. This will tie up an extra
4024 register but can save an extra cycle. Only do this if loop
4025 optimisation enabled. (We cannot pull this trick for add and
4026 sub instructions since the flow pass won't find
4027 autoincrements etc.) This allows us to generate compare
4028 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
4029 of LDI *AR0++, R0; CMPI 42, R0.
4031 Note that expand_binops will try to load an expensive constant
4032 into a register if it is used within a loop. Unfortunately,
4033 the cost mechanism doesn't allow us to look at the other
4034 operand to decide whether the constant is expensive. */
4036 if (! reload_in_progress
4039 && GET_CODE (operands
[1]) == CONST_INT
4040 && preserve_subexpressions_p ()
4041 && rtx_cost (operands
[1], code
) > 1)
4042 operands
[1] = force_reg (mode
, operands
[1]);
4044 if (! reload_in_progress
4045 && ! c4x_valid_operands (code
, operands
, mode
, 0))
4046 operands
[0] = force_reg (mode
, operands
[0]);
4050 /* We cannot do this for ADDI/SUBI insns since we will
4051 defeat the flow pass from finding autoincrement addressing
4053 if (! reload_in_progress
4054 && ! ((code
== PLUS
|| code
== MINUS
) && mode
== Pmode
)
4057 && GET_CODE (operands
[2]) == CONST_INT
4058 && preserve_subexpressions_p ()
4059 && rtx_cost (operands
[2], code
) > 1)
4060 operands
[2] = force_reg (mode
, operands
[2]);
4062 /* We can get better code on a C30 if we force constant shift counts
4063 into a register. This way they can get hoisted out of loops,
4064 tying up a register, but saving an instruction. The downside is
4065 that they may get allocated to an address or index register, and
4066 thus we will get a pipeline conflict if there is a nearby
4067 indirect address using an address register.
4069 Note that expand_binops will not try to load an expensive constant
4070 into a register if it is used within a loop for a shift insn. */
4072 if (! reload_in_progress
4073 && ! c4x_valid_operands (code
, operands
, mode
, TARGET_FORCE
))
4075 /* If the operand combination is invalid, we force operand1 into a
4076 register, preventing reload from having doing to do this at a
4078 operands
[1] = force_reg (mode
, operands
[1]);
4081 emit_move_insn (operands
[0], operands
[1]);
4082 operands
[1] = copy_rtx (operands
[0]);
4086 /* Just in case... */
4087 if (! c4x_valid_operands (code
, operands
, mode
, 0))
4088 operands
[2] = force_reg (mode
, operands
[2]);
4092 /* Right shifts require a negative shift count, but GCC expects
4093 a positive count, so we emit a NEG. */
4094 if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
4095 && (GET_CODE (operands
[2]) != CONST_INT
))
4096 operands
[2] = gen_rtx_NEG (mode
, negate_rtx (mode
, operands
[2]));
4102 /* The following predicates are used for instruction scheduling. */
4105 group1_reg_operand (op
, mode
)
4107 enum machine_mode mode
;
4109 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4111 if (GET_CODE (op
) == SUBREG
)
4112 op
= SUBREG_REG (op
);
4113 return REG_P (op
) && (! reload_completed
|| IS_GROUP1_REG (op
));
4118 group1_mem_operand (op
, mode
)
4120 enum machine_mode mode
;
4122 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4125 if (GET_CODE (op
) == MEM
)
4128 if (GET_CODE (op
) == PLUS
)
4130 rtx op0
= XEXP (op
, 0);
4131 rtx op1
= XEXP (op
, 1);
4133 if ((REG_P (op0
) && (! reload_completed
|| IS_GROUP1_REG (op0
)))
4134 || (REG_P (op1
) && (! reload_completed
|| IS_GROUP1_REG (op1
))))
4137 else if ((REG_P (op
)) && (! reload_completed
|| IS_GROUP1_REG (op
)))
4145 /* Return true if any one of the address registers. */
4148 arx_reg_operand (op
, mode
)
4150 enum machine_mode mode
;
4152 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4154 if (GET_CODE (op
) == SUBREG
)
4155 op
= SUBREG_REG (op
);
4156 return REG_P (op
) && (! reload_completed
|| IS_ADDR_REG (op
));
4161 c4x_arn_reg_operand (op
, mode
, regno
)
4163 enum machine_mode mode
;
4166 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4168 if (GET_CODE (op
) == SUBREG
)
4169 op
= SUBREG_REG (op
);
4170 return REG_P (op
) && (! reload_completed
|| (REGNO (op
) == regno
));
4175 c4x_arn_mem_operand (op
, mode
, regno
)
4177 enum machine_mode mode
;
4180 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4183 if (GET_CODE (op
) == MEM
)
4186 switch (GET_CODE (op
))
4195 return REG_P (op
) && (! reload_completed
|| (REGNO (op
) == regno
));
4199 if (REG_P (XEXP (op
, 0)) && (! reload_completed
4200 || (REGNO (XEXP (op
, 0)) == regno
)))
4202 if (REG_P (XEXP (XEXP (op
, 1), 1))
4203 && (! reload_completed
4204 || (REGNO (XEXP (XEXP (op
, 1), 1)) == regno
)))
4210 rtx op0
= XEXP (op
, 0);
4211 rtx op1
= XEXP (op
, 1);
4213 if ((REG_P (op0
) && (! reload_completed
4214 || (REGNO (op0
) == regno
)))
4215 || (REG_P (op1
) && (! reload_completed
4216 || (REGNO (op1
) == regno
))))
4230 ar0_reg_operand (op
, mode
)
4232 enum machine_mode mode
;
4234 return c4x_arn_reg_operand (op
, mode
, AR0_REGNO
);
4239 ar0_mem_operand (op
, mode
)
4241 enum machine_mode mode
;
4243 return c4x_arn_mem_operand (op
, mode
, AR0_REGNO
);
4248 ar1_reg_operand (op
, mode
)
4250 enum machine_mode mode
;
4252 return c4x_arn_reg_operand (op
, mode
, AR1_REGNO
);
4257 ar1_mem_operand (op
, mode
)
4259 enum machine_mode mode
;
4261 return c4x_arn_mem_operand (op
, mode
, AR1_REGNO
);
4266 ar2_reg_operand (op
, mode
)
4268 enum machine_mode mode
;
4270 return c4x_arn_reg_operand (op
, mode
, AR2_REGNO
);
4275 ar2_mem_operand (op
, mode
)
4277 enum machine_mode mode
;
4279 return c4x_arn_mem_operand (op
, mode
, AR2_REGNO
);
4284 ar3_reg_operand (op
, mode
)
4286 enum machine_mode mode
;
4288 return c4x_arn_reg_operand (op
, mode
, AR3_REGNO
);
4293 ar3_mem_operand (op
, mode
)
4295 enum machine_mode mode
;
4297 return c4x_arn_mem_operand (op
, mode
, AR3_REGNO
);
4302 ar4_reg_operand (op
, mode
)
4304 enum machine_mode mode
;
4306 return c4x_arn_reg_operand (op
, mode
, AR4_REGNO
);
4311 ar4_mem_operand (op
, mode
)
4313 enum machine_mode mode
;
4315 return c4x_arn_mem_operand (op
, mode
, AR4_REGNO
);
4320 ar5_reg_operand (op
, mode
)
4322 enum machine_mode mode
;
4324 return c4x_arn_reg_operand (op
, mode
, AR5_REGNO
);
4329 ar5_mem_operand (op
, mode
)
4331 enum machine_mode mode
;
4333 return c4x_arn_mem_operand (op
, mode
, AR5_REGNO
);
4338 ar6_reg_operand (op
, mode
)
4340 enum machine_mode mode
;
4342 return c4x_arn_reg_operand (op
, mode
, AR6_REGNO
);
4347 ar6_mem_operand (op
, mode
)
4349 enum machine_mode mode
;
4351 return c4x_arn_mem_operand (op
, mode
, AR6_REGNO
);
4356 ar7_reg_operand (op
, mode
)
4358 enum machine_mode mode
;
4360 return c4x_arn_reg_operand (op
, mode
, AR7_REGNO
);
4365 ar7_mem_operand (op
, mode
)
4367 enum machine_mode mode
;
4369 return c4x_arn_mem_operand (op
, mode
, AR7_REGNO
);
4374 ir0_reg_operand (op
, mode
)
4376 enum machine_mode mode
;
4378 return c4x_arn_reg_operand (op
, mode
, IR0_REGNO
);
4383 ir0_mem_operand (op
, mode
)
4385 enum machine_mode mode
;
4387 return c4x_arn_mem_operand (op
, mode
, IR0_REGNO
);
4392 ir1_reg_operand (op
, mode
)
4394 enum machine_mode mode
;
4396 return c4x_arn_reg_operand (op
, mode
, IR1_REGNO
);
4401 ir1_mem_operand (op
, mode
)
4403 enum machine_mode mode
;
4405 return c4x_arn_mem_operand (op
, mode
, IR1_REGNO
);
4409 /* This is similar to operand_subword but allows autoincrement
4413 c4x_operand_subword (op
, i
, validate_address
, mode
)
4416 int validate_address
;
4417 enum machine_mode mode
;
4419 if (mode
!= HImode
&& mode
!= HFmode
)
4420 fatal_insn ("c4x_operand_subword: invalid mode", op
);
4422 if (mode
== HFmode
&& REG_P (op
))
4423 fatal_insn ("c4x_operand_subword: invalid operand", op
);
4425 if (GET_CODE (op
) == MEM
)
4427 enum rtx_code code
= GET_CODE (XEXP (op
, 0));
4428 enum machine_mode mode
= GET_MODE (XEXP (op
, 0));
4429 enum machine_mode submode
;
4434 else if (mode
== HFmode
)
4441 return gen_rtx_MEM (submode
, XEXP (op
, 0));
4447 /* We could handle these with some difficulty.
4448 e.g., *p-- => *(p-=2); *(p+1). */
4449 fatal_insn ("c4x_operand_subword: invalid autoincrement", op
);
4455 fatal_insn ("c4x_operand_subword: invalid address", op
);
4457 /* Even though offsettable_address_p considers (MEM
4458 (LO_SUM)) to be offsettable, it is not safe if the
4459 address is at the end of the data page since we also have
4460 to fix up the associated high PART. In this case where
4461 we are trying to split a HImode or HFmode memory
4462 reference, we would have to emit another insn to reload a
4463 new HIGH value. It's easier to disable LO_SUM memory references
4464 in HImode or HFmode and we probably get better code. */
4466 fatal_insn ("c4x_operand_subword: address not offsettable", op
);
4473 return operand_subword (op
, i
, validate_address
, mode
);
4478 struct name_list
*next
;
4482 static struct name_list
*global_head
;
4483 static struct name_list
*extern_head
;
4486 /* Add NAME to list of global symbols and remove from external list if
4487 present on external list. */
4490 c4x_global_label (name
)
4493 struct name_list
*p
, *last
;
4495 /* Do not insert duplicate names, so linearly search through list of
4500 if (strcmp (p
->name
, name
) == 0)
4504 p
= (struct name_list
*) xmalloc (sizeof *p
);
4505 p
->next
= global_head
;
4509 /* Remove this name from ref list if present. */
4514 if (strcmp (p
->name
, name
) == 0)
4517 last
->next
= p
->next
;
4519 extern_head
= p
->next
;
4528 /* Add NAME to list of external symbols. */
4531 c4x_external_ref (name
)
4534 struct name_list
*p
;
4536 /* Do not insert duplicate names. */
4540 if (strcmp (p
->name
, name
) == 0)
4545 /* Do not insert ref if global found. */
4549 if (strcmp (p
->name
, name
) == 0)
4553 p
= (struct name_list
*) xmalloc (sizeof *p
);
4554 p
->next
= extern_head
;
4564 struct name_list
*p
;
4566 /* Output all external names that are not global. */
4570 fprintf (fp
, "\t.ref\t");
4571 assemble_name (fp
, p
->name
);
4575 fprintf (fp
, "\t.end\n");
4580 c4x_check_attribute (attrib
, list
, decl
, attributes
)
4582 tree list
, decl
, *attributes
;
4584 while (list
!= NULL_TREE
4585 && IDENTIFIER_POINTER (TREE_PURPOSE (list
))
4586 != IDENTIFIER_POINTER (DECL_NAME (decl
)))
4587 list
= TREE_CHAIN (list
);
4589 *attributes
= tree_cons (get_identifier (attrib
), TREE_VALUE (list
),
4595 c4x_insert_attributes (decl
, attributes
)
4596 tree decl
, *attributes
;
4598 switch (TREE_CODE (decl
))
4601 c4x_check_attribute ("section", code_tree
, decl
, attributes
);
4602 c4x_check_attribute ("const", pure_tree
, decl
, attributes
);
4603 c4x_check_attribute ("noreturn", noreturn_tree
, decl
, attributes
);
4604 c4x_check_attribute ("interrupt", interrupt_tree
, decl
, attributes
);
4608 c4x_check_attribute ("section", data_tree
, decl
, attributes
);
4616 /* Table of valid machine attributes. */
4617 const struct attribute_spec c4x_attribute_table
[] =
4619 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4620 { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute
},
4621 /* FIXME: code elsewhere in this file treats "naked" as a synonym of
4622 "interrupt"; should it be accepted here? */
4623 { "assembler", 0, 0, false, true, true, c4x_handle_fntype_attribute
},
4624 { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute
},
4625 { NULL
, 0, 0, false, false, false, NULL
}
4628 /* Handle an attribute requiring a FUNCTION_TYPE;
4629 arguments as in struct attribute_spec.handler. */
4631 c4x_handle_fntype_attribute (node
, name
, args
, flags
, no_add_attrs
)
4634 tree args ATTRIBUTE_UNUSED
;
4635 int flags ATTRIBUTE_UNUSED
;
4638 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
4640 warning ("`%s' attribute only applies to functions",
4641 IDENTIFIER_POINTER (name
));
4642 *no_add_attrs
= true;
4649 /* !!! FIXME to emit RPTS correctly. */
4652 c4x_rptb_rpts_p (insn
, op
)
4655 /* The next insn should be our label marking where the
4656 repeat block starts. */
4657 insn
= NEXT_INSN (insn
);
4658 if (GET_CODE (insn
) != CODE_LABEL
)
4660 /* Some insns may have been shifted between the RPTB insn
4661 and the top label... They were probably destined to
4662 be moved out of the loop. For now, let's leave them
4663 where they are and print a warning. We should
4664 probably move these insns before the repeat block insn. */
4666 fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
4671 /* Skip any notes. */
4672 insn
= next_nonnote_insn (insn
);
4674 /* This should be our first insn in the loop. */
4675 if (! INSN_P (insn
))
4678 /* Skip any notes. */
4679 insn
= next_nonnote_insn (insn
);
4681 if (! INSN_P (insn
))
4684 if (recog_memoized (insn
) != CODE_FOR_rptb_end
)
4690 return (GET_CODE (op
) == CONST_INT
) && TARGET_RPTS_CYCLES (INTVAL (op
));
4694 /* Check if register r11 is used as the destination of an insn. */
4707 if (INSN_P (x
) && GET_CODE (PATTERN (x
)) == SEQUENCE
)
4708 x
= XVECEXP (PATTERN (x
), 0, XVECLEN (PATTERN (x
), 0) - 1);
4710 if (INSN_P (x
) && (set
= single_set (x
)))
4713 if (GET_CODE (x
) == REG
&& REGNO (x
) == R11_REGNO
)
4716 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
4717 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
4721 if (c4x_r11_set_p (XEXP (x
, i
)))
4724 else if (fmt
[i
] == 'E')
4725 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
4726 if (c4x_r11_set_p (XVECEXP (x
, i
, j
)))
4733 /* The c4x sometimes has a problem when the insn before the laj insn
4734 sets the r11 register. Check for this situation. */
4737 c4x_check_laj_p (insn
)
4740 insn
= prev_nonnote_insn (insn
);
4742 /* If this is the start of the function no nop is needed. */
4746 /* If the previous insn is a code label we have to insert a nop. This
4747 could be a jump or table jump. We can find the normal jumps by
4748 scanning the function but this will not find table jumps. */
4749 if (GET_CODE (insn
) == CODE_LABEL
)
4752 /* If the previous insn sets register r11 we have to insert a nop. */
4753 if (c4x_r11_set_p (insn
))
4756 /* No nop needed. */
4761 /* Adjust the cost of a scheduling dependency. Return the new cost of
4762 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4763 A set of an address register followed by a use occurs a 2 cycle
4764 stall (reduced to a single cycle on the c40 using LDA), while
4765 a read of an address register followed by a use occurs a single cycle. */
4767 #define SET_USE_COST 3
4768 #define SETLDA_USE_COST 2
4769 #define READ_USE_COST 2
4772 c4x_adjust_cost (insn
, link
, dep_insn
, cost
)
4778 /* Don't worry about this until we know what registers have been
4780 if (flag_schedule_insns
== 0 && ! reload_completed
)
4783 /* How do we handle dependencies where a read followed by another
4784 read causes a pipeline stall? For example, a read of ar0 followed
4785 by the use of ar0 for a memory reference. It looks like we
4786 need to extend the scheduler to handle this case. */
4788 /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4789 (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4790 so only deal with insns we know about. */
4791 if (recog_memoized (dep_insn
) < 0)
4794 if (REG_NOTE_KIND (link
) == 0)
4798 /* Data dependency; DEP_INSN writes a register that INSN reads some
4802 if (get_attr_setgroup1 (dep_insn
) && get_attr_usegroup1 (insn
))
4803 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4804 if (get_attr_readarx (dep_insn
) && get_attr_usegroup1 (insn
))
4805 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4809 /* This could be significantly optimized. We should look
4810 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4811 insn uses ar0-ar7. We then test if the same register
4812 is used. The tricky bit is that some operands will
4813 use several registers... */
4814 if (get_attr_setar0 (dep_insn
) && get_attr_usear0 (insn
))
4815 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4816 if (get_attr_setlda_ar0 (dep_insn
) && get_attr_usear0 (insn
))
4817 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4818 if (get_attr_readar0 (dep_insn
) && get_attr_usear0 (insn
))
4819 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4821 if (get_attr_setar1 (dep_insn
) && get_attr_usear1 (insn
))
4822 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4823 if (get_attr_setlda_ar1 (dep_insn
) && get_attr_usear1 (insn
))
4824 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4825 if (get_attr_readar1 (dep_insn
) && get_attr_usear1 (insn
))
4826 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4828 if (get_attr_setar2 (dep_insn
) && get_attr_usear2 (insn
))
4829 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4830 if (get_attr_setlda_ar2 (dep_insn
) && get_attr_usear2 (insn
))
4831 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4832 if (get_attr_readar2 (dep_insn
) && get_attr_usear2 (insn
))
4833 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4835 if (get_attr_setar3 (dep_insn
) && get_attr_usear3 (insn
))
4836 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4837 if (get_attr_setlda_ar3 (dep_insn
) && get_attr_usear3 (insn
))
4838 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4839 if (get_attr_readar3 (dep_insn
) && get_attr_usear3 (insn
))
4840 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4842 if (get_attr_setar4 (dep_insn
) && get_attr_usear4 (insn
))
4843 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4844 if (get_attr_setlda_ar4 (dep_insn
) && get_attr_usear4 (insn
))
4845 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4846 if (get_attr_readar4 (dep_insn
) && get_attr_usear4 (insn
))
4847 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4849 if (get_attr_setar5 (dep_insn
) && get_attr_usear5 (insn
))
4850 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4851 if (get_attr_setlda_ar5 (dep_insn
) && get_attr_usear5 (insn
))
4852 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4853 if (get_attr_readar5 (dep_insn
) && get_attr_usear5 (insn
))
4854 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4856 if (get_attr_setar6 (dep_insn
) && get_attr_usear6 (insn
))
4857 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4858 if (get_attr_setlda_ar6 (dep_insn
) && get_attr_usear6 (insn
))
4859 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4860 if (get_attr_readar6 (dep_insn
) && get_attr_usear6 (insn
))
4861 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4863 if (get_attr_setar7 (dep_insn
) && get_attr_usear7 (insn
))
4864 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4865 if (get_attr_setlda_ar7 (dep_insn
) && get_attr_usear7 (insn
))
4866 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4867 if (get_attr_readar7 (dep_insn
) && get_attr_usear7 (insn
))
4868 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4870 if (get_attr_setir0 (dep_insn
) && get_attr_useir0 (insn
))
4871 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4872 if (get_attr_setlda_ir0 (dep_insn
) && get_attr_useir0 (insn
))
4873 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4875 if (get_attr_setir1 (dep_insn
) && get_attr_useir1 (insn
))
4876 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4877 if (get_attr_setlda_ir1 (dep_insn
) && get_attr_useir1 (insn
))
4878 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4884 /* For other data dependencies, the default cost specified in the
4888 else if (REG_NOTE_KIND (link
) == REG_DEP_ANTI
)
4890 /* Anti dependency; DEP_INSN reads a register that INSN writes some
4893 /* For c4x anti dependencies, the cost is 0. */
4896 else if (REG_NOTE_KIND (link
) == REG_DEP_OUTPUT
)
4898 /* Output dependency; DEP_INSN writes a register that INSN writes some
4901 /* For c4x output dependencies, the cost is 0. */
4909 c4x_init_builtins ()
4911 tree endlink
= void_list_node
;
4913 builtin_function ("fast_ftoi",
4916 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4917 C4X_BUILTIN_FIX
, BUILT_IN_MD
, NULL
, NULL_TREE
);
4918 builtin_function ("ansi_ftoi",
4921 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4922 C4X_BUILTIN_FIX_ANSI
, BUILT_IN_MD
, NULL
, NULL_TREE
);
4924 builtin_function ("fast_imult",
4927 tree_cons (NULL_TREE
, integer_type_node
,
4928 tree_cons (NULL_TREE
,
4929 integer_type_node
, endlink
))),
4930 C4X_BUILTIN_MPYI
, BUILT_IN_MD
, NULL
, NULL_TREE
);
4933 builtin_function ("toieee",
4936 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4937 C4X_BUILTIN_TOIEEE
, BUILT_IN_MD
, NULL
, NULL_TREE
);
4938 builtin_function ("frieee",
4941 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4942 C4X_BUILTIN_FRIEEE
, BUILT_IN_MD
, NULL
, NULL_TREE
);
4943 builtin_function ("fast_invf",
4946 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4947 C4X_BUILTIN_RCPF
, BUILT_IN_MD
, NULL
, NULL_TREE
);
4953 c4x_expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
4956 rtx subtarget ATTRIBUTE_UNUSED
;
4957 enum machine_mode mode ATTRIBUTE_UNUSED
;
4958 int ignore ATTRIBUTE_UNUSED
;
4960 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4961 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4962 tree arglist
= TREE_OPERAND (exp
, 1);
4968 case C4X_BUILTIN_FIX
:
4969 arg0
= TREE_VALUE (arglist
);
4970 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
4971 r0
= protect_from_queue (r0
, 0);
4972 if (! target
|| ! register_operand (target
, QImode
))
4973 target
= gen_reg_rtx (QImode
);
4974 emit_insn (gen_fixqfqi_clobber (target
, r0
));
4977 case C4X_BUILTIN_FIX_ANSI
:
4978 arg0
= TREE_VALUE (arglist
);
4979 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
4980 r0
= protect_from_queue (r0
, 0);
4981 if (! target
|| ! register_operand (target
, QImode
))
4982 target
= gen_reg_rtx (QImode
);
4983 emit_insn (gen_fix_truncqfqi2 (target
, r0
));
4986 case C4X_BUILTIN_MPYI
:
4989 arg0
= TREE_VALUE (arglist
);
4990 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4991 r0
= expand_expr (arg0
, NULL_RTX
, QImode
, 0);
4992 r1
= expand_expr (arg1
, NULL_RTX
, QImode
, 0);
4993 r0
= protect_from_queue (r0
, 0);
4994 r1
= protect_from_queue (r1
, 0);
4995 if (! target
|| ! register_operand (target
, QImode
))
4996 target
= gen_reg_rtx (QImode
);
4997 emit_insn (gen_mulqi3_24_clobber (target
, r0
, r1
));
5000 case C4X_BUILTIN_TOIEEE
:
5003 arg0
= TREE_VALUE (arglist
);
5004 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
5005 r0
= protect_from_queue (r0
, 0);
5006 if (! target
|| ! register_operand (target
, QFmode
))
5007 target
= gen_reg_rtx (QFmode
);
5008 emit_insn (gen_toieee (target
, r0
));
5011 case C4X_BUILTIN_FRIEEE
:
5014 arg0
= TREE_VALUE (arglist
);
5015 if (TREE_CODE (arg0
) == VAR_DECL
|| TREE_CODE (arg0
) == PARM_DECL
)
5016 put_var_into_stack (arg0
);
5017 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
5018 r0
= protect_from_queue (r0
, 0);
5019 if (register_operand (r0
, QFmode
))
5021 r1
= assign_stack_local (QFmode
, GET_MODE_SIZE (QFmode
), 0);
5022 emit_move_insn (r1
, r0
);
5025 if (! target
|| ! register_operand (target
, QFmode
))
5026 target
= gen_reg_rtx (QFmode
);
5027 emit_insn (gen_frieee (target
, r0
));
5030 case C4X_BUILTIN_RCPF
:
5033 arg0
= TREE_VALUE (arglist
);
5034 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
5035 r0
= protect_from_queue (r0
, 0);
5036 if (! target
|| ! register_operand (target
, QFmode
))
5037 target
= gen_reg_rtx (QFmode
);
5038 emit_insn (gen_rcpfqf_clobber (target
, r0
));
5045 c4x_asm_named_section (name
, flags
)
5047 unsigned int flags ATTRIBUTE_UNUSED
;
5049 fprintf (asm_out_file
, "\t.sect\t\"%s\"\n", name
);
5053 c4x_globalize_label (stream
, name
)
5057 default_globalize_label (stream
, name
);
5058 c4x_global_label (name
);